Merge "Allow to set TBS pattern on/off from config" am: eac16ca226 am: 7d73116ba3
am: 301e25fc2e

Change-Id: I32e36e02a46af8ecc1701ef34488682b1428c865
diff --git a/acts/framework/acts/controllers/buds_controller.py b/acts/framework/acts/controllers/buds_controller.py
new file mode 100644
index 0000000..bb56e1f
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_controller.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python3.4
+#
+#   Copyright 2016 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+"""This is the controller module for Pixel Buds devices.
+
+For the device definition, see buds_lib.apollo_lib.
+"""
+
+from acts.controllers.buds_lib.apollo_lib import ParentDevice
+
+
+ACTS_CONTROLLER_CONFIG_NAME = 'BudsDevice'
+ACTS_CONTROLLER_REFERENCE_NAME = 'buds_devices'
+
+
+class ConfigError(Exception):
+    """Raised when the controller configuration is malformatted.
+
+    Thrown by create() when configs is not a list, when a dict entry lacks
+    the required 'serial' key, or when an entry is neither a str nor a dict.
+    """
+
+
+def create(configs):
+    """Creates a Pixel Buds device for each config found within the configs.
+
+    Args:
+        configs: The configs can be structured in the following ways:
+
+                    ['serial1', 'serial2', ... ]
+
+                    [
+                        {
+                            'serial': 'serial1',
+                            'label': 'some_info',
+                            ...
+                        },
+                        {
+                            'serial': 'serial2',
+                            'label': 'other_info',
+                            ...
+                        }
+                    ]
+    """
+    created_controllers = []
+
+    if not isinstance(configs, list):
+        raise ConfigError('Malformatted config %s. Must be a list.' % configs)
+
+    for config in configs:
+        if isinstance(config, str):
+            created_controllers.append(ParentDevice(config))
+        elif isinstance(config, dict):
+            serial = config.get('serial', None)
+            if not serial:
+                raise ConfigError('Buds Device %s is missing entry "serial".' %
+                                  config)
+            commander_port = config.get('commander_port', None)
+            log_port = config.get('log_port', None)
+            created_controllers.append(
+                ParentDevice(serial, commander_port=commander_port,
+                           log_port=log_port))
+        else:
+            raise ConfigError('Malformatted config: "%s". Must be a string or '
+                              'dict' % config)
+    return created_controllers
+
+
+def destroy(buds_device_list):
+    """Destroys the Pixel Buds devices (intentional no-op).
+
+    Device objects register their own cleanup with atexit, so there is
+    nothing to tear down here.
+
+    Args:
+        buds_device_list: The list of devices returned by create().
+    """
+    pass
+
+
+def get_info(buds_device_list):
+    """Collects identifying info for each Pixel Buds device.
+
+    Args:
+        buds_device_list: The list of devices returned by create().
+
+    Returns:
+        A list of dicts, one per device, with 'serial' and 'name' keys.
+    """
+    device_infos = []
+    for buds_device in buds_device_list:
+        device_infos.append({'serial': buds_device.serial_number,
+                             'name': buds_device.device_name})
+    return device_infos
diff --git a/acts/framework/acts/controllers/buds_lib/__init__.py b/acts/framework/acts/controllers/buds_lib/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/__init__.py
diff --git a/acts/framework/acts/controllers/buds_lib/apollo_lib.py b/acts/framework/acts/controllers/buds_lib/apollo_lib.py
new file mode 100644
index 0000000..9b971ff
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/apollo_lib.py
@@ -0,0 +1,1516 @@
+#!/usr/bin/env python3
+#
+#   Copyright 2018 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+"""Apollo Commander through USB/UART interface.
+
+It uses python serial lib to communicate to a Apollo device.
+Some of the commander may not work yet, pending on the final version of the
+commander implementation.
+
+Typical usage examples:
+
+    To get a list of all apollo devices:
+    >>> devices = apollo_lib.get_devices()
+
+    To work with a specific apollo device:
+    >>> apollo = apollo_lib.Device(serial_number='ABCDEF0123456789',
+    >>> commander_port='/dev/ttyACM0')
+
+    To send a single command:
+    >>> apollo.cmd('PowOff')
+
+    To send a list of commands:
+    >>> apollo.cmd(['PowOff', 'PowOn', 'VolUp', 'VolDown']
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import atexit
+import os
+import re
+import subprocess
+import time
+
+import serial
+from acts import tracelogger
+from acts.controllers.buds_lib import logserial
+from acts.controllers.buds_lib.b29_lib import B29Device
+from acts.controllers.buds_lib.dev_utils import apollo_log_decoder
+from acts.controllers.buds_lib.dev_utils import apollo_log_regex
+from acts.controllers.buds_lib.dev_utils import apollo_sink_events
+from logging import Logger
+from retry import retry
+
+logging = tracelogger.TakoTraceLogger(Logger('apollo'))
+
+BAUD_RATE = 115200
+BYTE_SIZE = 8
+PARITY = 'N'
+STOP_BITS = 1
+DEFAULT_TIMEOUT = 3
+WRITE_TO_FLASH_WAIT = 30  # wait 30 sec when writing to external flash.
+LOG_REGEX = re.compile(r'(?P<time_stamp>\d+)\s(?P<msg>.*)')
+STATUS_REGEX = r'(?P<time_stamp>\d+)\s(?P<key>.+?): (?P<value>.+)'
+APOLLO_CHIP = '_Apollo_'
+DEVICE_REGEX = (
+    r'_(?P<device_serial>[A-Z0-9]+)-(?P<interface>\w+)'
+    r'\s->\s(\.\./){2}(?P<port>\w+)'
+)
+OTA_VERIFICATION_FAILED = 'OTA verification failed. corrupt image?'
+OTA_ERASING_PARTITION = 'INFO OTA eras ptns'
+OTA_RECEIVE_CSR_REGEX = r'INFO OTA CSR rcv begin'
+CODEC_REGEX = r'(?P<time_stamp>\d+)\s(?P<codec>\w+) codec is used.'
+BUILD_REGEX = r'\d+\.\d+\.(?P<build>\d+)-?(?P<psoc_build>\d*)-?(?P<debug>\w*)'
+
+
+class Error(Exception):
+    """Module Level Error; base class for all errors raised here."""
+
+
+class ResponseError(Error):
+    """cmd Response Error: a commander command returned a bad response."""
+
+
+class DeviceError(Error):
+    """Device Error: the device rejected a command or misbehaved."""
+
+
+class ConnectError(Error):
+    """Connection Error: a serial connection could not be established."""
+
+
+def get_devices():
+    """Get all available Apollo devices.
+
+    Scans /dev/serial/by-id for entries containing the Apollo chip marker
+    and parses each symlink into a serial number and tty port.
+
+    Returns:
+        (list) A list of available devices or empty list if none found
+
+    Raises:
+        Error: raises Error if no Apollo devices or wrong interfaces were found.
+    """
+    devices = []
+    result = os.popen('ls -l /dev/serial/by-id/*%s*' % APOLLO_CHIP).read()
+    if not result:
+        raise Error('No Apollo Devices found.')
+    for line in result.splitlines():
+        match = re.search(DEVICE_REGEX, line)
+        interface = match.group('interface')
+        # TODO: The commander port will always be None.
+        # NOTE(review): commander_port is reset on every iteration and the
+        # 'if00' branch 'continue's before a device dict is appended, so
+        # entries are only emitted from 'if02' lines, always with
+        # commander_port=None — confirm intended behavior.
+        commander_port = None
+        if interface == 'if00':
+            commander_port = '/dev/' + match.group('port')
+            continue
+        elif interface == 'if02':
+            log_port = '/dev/' + match.group('port')
+        else:
+            raise Error('Wrong interface found.')
+        device_serial = match.group('device_serial')
+
+        device = {
+            'commander_port': commander_port,
+            'log_port': log_port,
+            'serial_number': device_serial
+        }
+        devices.append(device)
+    return devices
+
+
+class BudsDevice(object):
+    """Provides a simple class to interact with Apollo."""
+
+    def __init__(self, serial_number, commander_port=None, log_port=None,
+                 serial_logger=None):
+        """Establish a connection to a Apollo.
+
+        Open a connection to a device with a specific serial number.
+
+        Args:
+            serial_number: USB serial number of the target device.
+            commander_port: Optional tty path of the commander interface.
+            log_port: Optional tty path of the log interface.
+            serial_logger: Optional logger to use instead of the module one.
+
+        Raises:
+            ConnectError: raises ConnectError if cannot open the device.
+        """
+        self.set_log = False
+        self.connection_handle = None
+        self.device_closed = False
+        if serial_logger:
+            self.set_logger(serial_logger)
+        self.pc = logserial.PortCheck()
+        self.serial_number = serial_number
+        # TODO (kselvakumaran): move this to an interface device class that
+        # apollo_lib.BudsDevice should derive from
+        # Auto-discover ports only when neither was supplied.
+        # NOTE(review): if only log_port is passed, self.commander_port is
+        # never set and the LogSerial call below raises AttributeError —
+        # confirm callers always pass commander_port along with log_port.
+        if not commander_port and not log_port:
+            self.get_device_ports(self.serial_number)
+        if commander_port:
+            self.commander_port = commander_port
+        if log_port:
+            self.log_port = log_port
+        self.apollo_log = None
+        self.cmd_log = None
+        self.apollo_log_regex = apollo_log_regex
+        self.dut_type = 'apollo'
+
+        # TODO (kselvakumaran): move this to an interface device class that
+        # apollo_lib.BudsDevice should derive from
+
+        try:  # Try to open the device
+            self.connection_handle = logserial.LogSerial(
+                self.commander_port, BAUD_RATE, flush_output=False,
+                serial_logger=logging)
+            self.wait_for_commander()
+        except (serial.SerialException, AssertionError, ConnectError) as e:
+            logging.error(
+                'error opening device {}: {}'.format(serial_number, e))
+            raise ConnectError('Error open the device.')
+        # disable sleep on idle
+        self.stay_connected_state = 1
+        atexit.register(self.close)
+
+    def set_logger(self, serial_logger):
+        """Replace the module-level logger with the supplied one.
+
+        Also propagates the logger to an existing connection handle so
+        serial traffic is logged to the same destination.
+        """
+        global logging
+        logging = serial_logger
+        self.set_log = True
+        if self.connection_handle:
+            self.connection_handle.set_logger(serial_logger)
+
+    def get_device_ports(self, serial_number):
+        """Resolve the commander and log tty ports for a serial number.
+
+        USB interface 00 is the commander port and interface 02 is the log
+        port. Sets self.commander_port and self.log_port.
+
+        Raises:
+            ConnectError: If no matching ports can be found.
+        """
+        commander_query = {'ID_SERIAL_SHORT': serial_number,
+                           'ID_USB_INTERFACE_NUM': '00'}
+        log_query = {'ID_SERIAL_SHORT': serial_number,
+                     'ID_USB_INTERFACE_NUM': '02'}
+        self.commander_port = self.pc.search_port_by_property(commander_query)
+        self.log_port = self.pc.search_port_by_property(log_query)
+        if not self.commander_port and not self.log_port:
+            raise ConnectError(
+                'BudsDevice serial number %s not found' % serial_number)
+        else:
+            if not self.commander_port:
+                raise ConnectError('No devices found')
+            # The queries return sequences; keep the first match.
+            # NOTE(review): an empty log_port result reaches the [0] below
+            # and would raise IndexError — confirm this cannot occur.
+            self.commander_port = self.commander_port[0]
+            self.log_port = self.log_port[0]
+
+    def get_all_log(self):
+        """Return everything logged so far by the serial connection."""
+        return self.connection_handle.get_all_log()
+
+    def query_log(self, from_timestamp, to_timestamp):
+        """Return serial log entries between the two timestamps."""
+        return self.connection_handle.query_serial_log(
+            from_timestamp=from_timestamp, to_timestamp=to_timestamp)
+
+    def send(self, cmd):
+        """Sends the command to serial port.
+
+        It does not care about whether the cmd is successful or not.
+
+        Args:
+            cmd: The passed command
+
+        Returns:
+            The output read back from the connection after the write.
+        """
+        logging.debug(cmd)
+        # with self._lock:
+        self.connection_handle.write(cmd)
+        result = self.connection_handle.read()
+        return result
+
+    def cmd(self, cmds, wait=None):
+        """Sends the commands and check responses.
+
+        Valid cmd will return something like '585857269 running cmd VolUp'.
+        Invalid cmd will log an error and return something like '585826369 No
+        command vol exists'.
+
+        Args:
+            cmds: The commands to the commander; a single str is accepted
+                and treated as a one-element list.
+            wait: wait in seconds for the cmd response.
+
+        Returns:
+            (list) The second element of the array returned by _cmd,
+            one entry per command sent.
+        """
+        if isinstance(cmds, str):
+            cmds = [cmds]
+        results = []
+        for cmd in cmds:
+            _, result = self._cmd(cmd, wait=wait)
+            results.append(result)
+        return results
+
+    def _cmd(self, cmd, wait=None, throw_error=True):
+        """Sends a single command and check responses.
+
+        Valid cmd will return something like '585857269 running cmd VolUp'.
+        Invalid cmd will log an error and return something like '585826369 No
+        command vol exists'. Some cmd will return multiple lines of output.
+        eg. 'menu'.
+
+        Args:
+            cmd: The command to the commander.
+            wait: wait in seconds for the cmd response.
+            throw_error: Throw exception on True
+
+        Returns:
+            (list) containing such as the following:
+            [<return value>, [<protobuf dictionary>, str]]
+            Hex strings (protobuf) are replaced by its decoded dictionaries
+            and stored in an arry along with other string returned fom the
+            device.
+
+        Raises:
+            DeviceError: On Error.(Optional)
+        """
+        self.connection_handle.write(cmd)
+
+        # Busy-wait until the reader thread drains pending serial output.
+        while self.connection_handle.is_logging:
+            time.sleep(.01)
+        if wait:
+            self.wait(wait)
+        # Using read_serial_port as readlines is a blocking call until idle.
+        res = self.read_serial_port()
+        result = []
+        self.cmd_log = res
+        # Collect only lines between COMMANDER_RECV_COMMAND and
+        # COMMANDER_FINISH_COMMAND markers emitted by the device.
+        command_resv = False
+        # TODO: Cleanup the usage of the two booleans below.
+        command_finish = False
+        command_rejected = False
+        # for line in iter_res:
+        for line in res:
+            if isinstance(line, dict):
+                if 'COMMANDER_RECV_COMMAND' in line.values():
+                    command_resv = True
+                elif 'COMMANDER_REJECT_COMMAND' in line.values():
+                    logging.info('Command rejected')
+                    command_rejected = True
+                    break
+                elif 'COMMANDER_FINISH_COMMAND' in line.values():
+                    command_finish = True
+                    break
+                elif (command_resv and not command_finish and
+                      not command_rejected):
+                    result.append(line)
+            # TODO(jesussalinas): Remove when only encoded lines are required
+            elif command_resv and not command_finish and not command_rejected:
+                if 'running cmd' not in line:
+                    result.append(line)
+        success = True
+        # Rejection, or never seeing the RECV marker, means failure.
+        if command_rejected or not command_resv:
+            success = False
+            if throw_error:
+                logging.info(res)
+                raise DeviceError('Unknown command %s' % cmd)
+        return success, result
+
+    def get_pdl(self):
+        """Returns the PDL stack dictionary.
+
+        The PDL stack stores paired devices of Apollo. Each PDL entry include
+        mac_address, flags, link_key, priority fields.
+
+        Returns:
+            list of pdl dicts.
+        """
+        # Get the mask from CONNLIB41:
+        # CONNLIB41 typically looks something like this: 2403 fff1
+        # 2403 fff1 is actually two 16-bit words of a 32-bit integer
+        # like 0xfff12403 . This tells the chronological order of the entries
+        # in the paired device list one nibble each. LSB to MSB corresponds to
+        # CONNLIB42 through CONNLIB49. So, the above tells us that the device at
+        # 0x2638 is the 3rd most recent entry 0x2639 the latest entry etc. As
+        # a device re-pairs the masks are updated.
+        response = []
+        mask = 'ffffffff'
+        res = self.cmd('GetPSHex 0x2637')
+        if len(res[0]) == 0:
+            logging.warning('Error reading PDL mask @ 0x2637')
+            return response
+        else:
+            regexp = r'\d+\s+(?P<m1>....)\s(?P<m2>....)'
+            match = re.match(regexp, res[0][0])
+            if match:
+                connlib41 = match.group('m2') + match.group('m1')
+                mask = connlib41[::-1]
+                logging.debug('PDL mask: %s' % mask)
+
+        # Now get the MAC/link key
+        mask_idx = 0
+        for i in range(9784, 9883):
+            types = {}
+            res = self.cmd('GetPSHex ' + '%0.2x' % i)
+            if len(res[0]) == 0:
+                break
+            else:
+                regexp = ('\d+\s+(?P<Mac>....\s....\s....)\s'
+                          '(?P<Flags>....\s....)\s(?P<Linkkey>.*)')
+                match = re.search(regexp, res[0][0])
+                if match:
+                    mac_address = match.group('Mac').replace(' ', '').upper()
+                    formatted_mac = ''
+                    for i in range(len(mac_address)):
+                        formatted_mac += mac_address[i]
+                        if i % 2 != 0 and i < (len(mac_address) - 1):
+                            formatted_mac += ':'
+                    types['mac_address'] = formatted_mac
+                    types['flags'] = match.group('Flags').replace(' ', '')
+                    types['link_key'] = match.group('Linkkey').replace(' ', '')
+                    types['priority'] = int(mask[mask_idx], 16)
+                    mask_idx += 1
+                    response.append(types)
+
+        return response
+
+    def set_pairing_mode(self):
+        """Enter Bluetooth Pairing mode.
+
+        Returns:
+            The 'Pair' command response, or None if the command failed.
+        """
+        logging.debug('Inside set_pairing_mode()...')
+        try:
+            return self.cmd('Pair')
+        except DeviceError:
+            logging.exception('Pair cmd failed')
+
+    # TODO (kselvakumaran): move this to an interface BT class that
+    # apollo_lib.BudsDevice should derive from
+    def turn_on_bluetooth(self):
+        """No-op stub kept for interface parity; always reports success."""
+        return True
+
+    # TODO (kselvakumaran): move this to an interface BT class that
+    # apollo_lib.BudsDevice should derive from
+    def is_bt_enabled(self):
+        """Check if BT is enabled.
+
+        (TODO:weisu)Currently it is always true since there is no way to disable
+        BT in apollo
+
+        Returns:
+            True if BT is enabled.
+        """
+        logging.debug('Inside is_bt_enabled()...')
+        return True
+
+    def panic(self):
+        """Hitting a panic, device will be automatically reset after 5s.
+
+        Uses send() rather than cmd() because the device reboots before a
+        normal command response can be parsed.
+        """
+        logging.debug('Inside panic()...')
+        try:
+            self.send('panic')
+        except serial.SerialException:
+            logging.exception('panic cmd failed')
+
+    def power(self, cmd):
+        """Controls the power state of the device.
+
+        Args:
+            cmd: If 'Off', powers the device off. Otherwise, powers the device
+                 on.
+
+        Returns:
+            The 'Pow' command response, or None if the command failed.
+        """
+        logging.debug('Inside power({})...'.format(cmd))
+        mode = '0' if cmd == 'Off' else '1'
+        cmd = 'Pow ' + mode
+        try:
+            return self.cmd(cmd)
+        except DeviceError:
+            logging.exception('{} cmd failed'.format(cmd))
+
+    def charge(self, state):
+        """Charging Control of the device.
+
+        Args:
+          state: '1/0' to enable/disable charging.
+        """
+        logging.debug('Inside charge({})...'.format(state))
+        cmd = 'chg ' + state
+        try:
+            self.cmd(cmd)
+        except DeviceError:
+            logging.exception('{} cmd failed'.format(cmd))
+
+    def get_battery_level(self):
+        """Get the battery charge level.
+
+        Returns:
+            charge percentage string.
+
+        Raises:
+            DeviceError: GetBatt response error.
+        """
+        response = self.cmd('GetBatt')
+        for line in response[0]:
+            if line.find('Batt:') > -1:
+                # Response is in this format '<messageID> Batt: <percentage>'
+                return line.split()[2]
+        raise DeviceError('Battery Level not found in GetBatt response')
+
+    def get_gas_gauge_current(self):
+        """Get the Gauge current value.
+
+        Reads I2C register 0x29 and parses the hex value from the response.
+
+        Returns:
+            Float value with the info
+
+        Raises:
+            DeviceError: I2CRead response error.
+        """
+        response = self.cmd('I2CRead 2 0x29')
+        for line in response[0]:
+            if line.find('value') > -1:
+                # Value is the 7th token, with a trailing comma stripped.
+                return float.fromhex(line.split()[6].replace(',', ''))
+        raise DeviceError('Current Level not found in I2CRead response')
+
+    def get_gas_gauge_voltage(self):
+        """Get the Gauge voltage value.
+
+        Reads I2C register 0x2A and parses the hex value from the response.
+
+        Returns:
+            Float value with the info
+
+        Raises:
+            DeviceError: I2CRead response error.
+        """
+        response = self.cmd('I2CRead 2 0x2A')
+        for line in response[0]:
+            if line.find('value') > -1:
+                # Value is the 7th token, with a trailing comma stripped.
+                return float.fromhex(line.split()[6].replace(',', ''))
+        raise DeviceError('Voltage Level not found in I2CRead response')
+
+    def reset(self, wait=5):
+        """Resetting the device.
+
+        Args:
+            wait: seconds to pause between power off and power on.
+        """
+        logging.debug('Inside reset()...')
+        self.power('Off')
+        self.wait(wait)
+        self.power('On')
+
+    def close(self):
+        """Close the serial connection once; safe to call repeatedly.
+
+        Flushes the module logger only if no custom logger was installed.
+        """
+        if not self.device_closed:
+            self.connection_handle.close()
+            self.device_closed = True
+            if not self.set_log:
+                logging.flush_log()
+
+    def get_serial_log(self):
+        """Retrieve the logs from connection handle."""
+        return self.connection_handle.get_all_log()
+
+    def factory_reset(self):
+        """Erase paired device(s) (bond) data and reboot device.
+
+        Sent with send() (fire-and-forget) since the device reboots;
+        reconnect() re-establishes the serial connection afterwards.
+        """
+        cmd = 'FactoryReset 1'
+        self.send(cmd)
+        self.wait(5)
+        self.reconnect()
+
+    def reboot(self, reconnect=10, retry_timer=30):
+        """Rebooting the device.
+
+        Triggers a panic (the device self-resets) and waits up to 60s for
+        the serial port to disappear before attempting to reconnect.
+
+        Args:
+            reconnect: reconnect attempts after reboot, None for no reconnect.
+            retry_timer: wait time in seconds before next connect retry.
+
+        Returns:
+            True if successfully reboot or reconnect.
+        """
+        logging.debug('Inside reboot()...')
+        self.panic()
+        if not reconnect:
+            return True
+        ini_time = time.time()
+        message = 'waiting for {} to shutdown'.format(self.serial_number)
+        logging.info(message)
+        while True:
+            alive = self.connection_handle.is_port_alive()
+            if not alive:
+                logging.info('rebooted')
+                break
+            if time.time() - ini_time > 60:
+                logging.info('Shutdown timeouted')
+                break
+            time.sleep(0.5)
+        return self.reconnect(reconnect, retry_timer)
+
+    def reconnect(self, iterations=30, retry_timer=20):
+        """Reconnect to the device.
+
+        Args:
+            iterations: Number of retry iterations.
+            retry_timer: wait time in seconds before next connect retry.
+
+        Returns:
+            True if reconnect to the device successfully.
+
+        Raises:
+            DeviceError: Failed to reconnect.
+        """
+        logging.debug('Inside reconnect()...')
+        for i in range(iterations):
+            try:
+                # port might be changed, refresh the port list.
+                self.get_device_ports(self.serial_number)
+                message = 'commander_port: {}, log_port: {}'.format(
+                    self.commander_port, self.log_port)
+                logging.info(message)
+                self.connection_handle.refresh_port_connection(
+                    self.commander_port)
+                # Sometimes there might be sfome delay when commander is
+                # functioning.
+                self.wait_for_commander()
+                return True
+            except Exception as e:  # pylint: disable=broad-except
+                message = 'Fail to connect {} times due to {}'.format(
+                    i + 1, e)
+                logging.warning(message)
+                # self.close()
+                time.sleep(retry_timer)
+        raise DeviceError('Cannot reconnect to %s with %d attempts.',
+                          self.commander_port, iterations)
+
+    @retry(Exception, tries=4, delay=1, backoff=2)
+    def wait_for_commander(self):
+        """Wait for commander to function.
+
+        Probes with the 'menu' command; retried up to 4 times with
+        exponential backoff by the @retry decorator.
+
+        Returns:
+            True if commander worked.
+
+        Raises:
+            DeviceError: Failed to bring up commander.
+        """
+        # self.Flush()
+        result = self.cmd('menu')
+        if result:
+            return True
+        else:
+            raise DeviceError('Cannot start commander.')
+
+    def wait(self, timeout=1):
+        """Wait for the device.
+
+        Args:
+            timeout: seconds to sleep.
+        """
+        logging.debug('Inside wait()...')
+        time.sleep(timeout)
+
+    def led(self, cmd):
+        """LED control of the device.
+
+        Args:
+            cmd: suffix appended to 'EventUsrLeds' to form the sink event.
+
+        Returns:
+            The command response, or None if the command failed.
+        """
+        message = 'Inside led({})...'.format(cmd)
+        logging.debug(message)
+        cmd = 'EventUsrLeds' + cmd
+        try:
+            return self.cmd(_evt_hex(cmd))
+        except DeviceError:
+            logging.exception('LED cmd failed')
+
+    def volume(self, key, times=1):
+        """Volume Control. (Down/Up).
+
+        Args:
+            key: Down --Decrease a volume.
+                 Up --Increase a volume.
+            times: Simulate number of swipes.
+
+        Returns:
+            (int) Volume level, or None if no VOLUME_CHANGE event was seen.
+
+        Raises:
+            DeviceError
+        """
+        message = 'Inside volume({}, {})...'.format(key, times)
+        logging.debug(message)
+        updown = {
+            'Up': '1',
+            'Down': '0',
+        }
+        cmds = ['ButtonSwipe ' + updown[key]] * times
+        logging.info(cmds)
+        try:
+            self.cmd(cmds)
+            # Scan the decoded log for the resulting VOLUME_CHANGE event.
+            for line in self.cmd_log:
+                if isinstance(line, dict):
+                    if 'id' in line and line['id'] == 'VOLUME_CHANGE':
+                        if 'data' in line and line['data']:
+                            return int(line['data'])
+        except DeviceError:
+            logging.exception('ButtonSwipe cmd failed')
+
+    def menu(self):
+        """Return a list of supported commands, or None on failure."""
+        logging.debug('Inside menu()...')
+        try:
+            return self.cmd('menu')
+        except DeviceError:
+            logging.exception('menu cmd failed')
+
+    def set_ohd(self, mode='AUTO'):
+        """Manually set the OHD (on-head detection) status, overriding
+        auto-detection.
+
+        Args:
+            mode: ON --OHD manual mode with on-ear state.
+                  OFF --OHD manual mode with off-ear state.
+                  AUTO --OHD auto-detection mode.
+        Raises:
+            DeviceError: OHD Command failure.
+        """
+        logging.debug('Inside set_ohd()...')
+        try:
+            if mode != 'AUTO':
+                # Set up OHD manual mode
+                self.cmd('Test 14 0 2 1')
+                if mode == 'ON':
+                    # Detects on-ear
+                    self.cmd('Test 14 0 2 1 0x3')
+                else:
+                    # Detects off-ear
+                    self.cmd('Test 14 0 2 1 0x0')
+            else:
+                # Default mode (auto detect.)
+                self.cmd('Test 14 0 2 0')
+        except DeviceError:
+            logging.exception('OHD cmd failed')
+
+    def music_control_events(self, cmd, regexp=None, wait=.5):
+        """Sends the EvtHex to control media player.
+
+        Args:
+            cmd: the command to perform.
+            regexp: Optional pattern to validate the event logs.
+            wait: seconds to wait for the command response.
+
+        Returns:
+            Boolean: True if the command triggers the correct events on the
+                     device, False otherwise.
+
+        # TODO(nviboonchan:) Add more supported commands.
+        Supported commands:
+            'PlayPause'
+            'VolumeUp'
+            'VolumeDown',
+        """
+        cmd_regexp = {
+            # Play/ Pause would need to pass the regexp argument since it's
+            # sending the same event but returns different responses depending
+            # on the device state.
+            'VolumeUp': apollo_log_regex.VOLUP_REGEX,
+            'VolumeDown': apollo_log_regex.VOLDOWN_REGEX,
+        }
+        if not regexp:
+            if cmd not in cmd_regexp:
+                logmsg = 'Expected pattern is not defined for event %s' % cmd
+                logging.exception(logmsg)
+                return False
+            regexp = cmd_regexp[cmd]
+        self.cmd('EvtHex %s' % apollo_sink_events.SINK_EVENTS['EventUsr' + cmd],
+                 wait=wait)
+        # Either a matching raw log line or a decoded AVRCP play-status
+        # event counts as confirmation.
+        for line in self.cmd_log:
+            if isinstance(line, str):
+                if re.search(regexp, line):
+                    return True
+            elif isinstance(line, dict):
+                if line.get('id', None) == 'AVRCP_PLAY_STATUS_CHANGE':
+                    return True
+        return False
+
+    def avrcp(self, cmd):
+        """sends the Audio/Video Remote Control Profile (avrcp) control command.
+
+        Supported commands:
+            'PlayPause'
+            'Stop'
+            'SkipForward',
+            'SkipBackward',
+            'FastForwardPress',
+            'FastForwardRelease',
+            'RewindPress',
+            'RewindRelease',
+            'ShuffleOff',
+            'ShuffleAllTrack',
+            'ShuffleGroup',
+            'RepeatOff',
+            'RepeatSingleTrack',
+            'RepeatAllTrack',
+            'RepeatGroup',
+            'Play',
+            'Pause',
+            'ToggleActive',
+            'NextGroupPress',
+            'PreviousGroupPress',
+            'NextGroupRelease',
+            'PreviousGroupRelease',
+
+        Args:
+            cmd: The avrcp command.
+
+        """
+        cmd = 'EventUsrAvrcp' + cmd
+        logging.debug(cmd)
+        try:
+            self.cmd(_evt_hex(cmd))
+        except DeviceError:
+            logging.exception('avrcp cmd failed')
+
+    def enable_log(self, levels=None):
+        """Enable specified logs.
+
+        Args:
+            levels: list of LOG_FEATURES keys; defaults to ['ALL'].
+
+        Returns:
+            The 'LogOn' command response, or None on failure.
+        """
+        logging.debug('Inside enable_log()...')
+        if levels is None:
+            levels = ['ALL']
+        # OR the per-feature bitmasks into one hex mask.
+        masks = hex(
+            sum([int(apollo_sink_events.LOG_FEATURES[x], 16) for x in levels]))
+        try:
+            self.cmd('LogOff %s' % apollo_sink_events.LOG_FEATURES['ALL'])
+            return self.cmd('LogOn %s' % masks)
+        except DeviceError:
+            logging.exception('Enable log failed')
+
+    def disable_log(self, levels=None):
+        """Disable specified logs.
+
+        Args:
+            levels: list of LOG_FEATURES keys; defaults to ['ALL'].
+
+        Returns:
+            The 'LogOff' command response, or None on failure.
+        """
+        logging.debug('Inside disable_log()...')
+        if levels is None:
+            levels = ['ALL']
+        # OR the per-feature bitmasks into one hex mask.
+        masks = hex(
+            sum([int(apollo_sink_events.LOG_FEATURES[x], 16) for x in levels]))
+        try:
+            self.cmd('LogOn %s' % apollo_sink_events.LOG_FEATURES['ALL'])
+            return self.cmd('LogOff %s' % masks)
+        except DeviceError:
+            logging.exception('Disable log failed')
+
+    def write_to_flash(self, file_name=None):
+        """Write file to external flash.
+
+        Note: Assume pv is installed. If not, install it by
+              'apt-get install pv'.
+
+        Args:
+            file_name: Full path file name.
+
+        Returns:
+            Boolean: True if write to partition is successful. False otherwise.
+        """
+        logging.debug('Inside write_to_flash()...')
+        if not os.path.isfile(file_name):
+            message = 'DFU file %s not found.'.format(file_name)
+            logging.exception(message)
+            return False
+        logging.info(
+            'Write file {} to external flash partition ...'.format(file_name))
+        image_size = os.path.getsize(file_name)
+        logging.info('image size is {}'.format(image_size))
+        results = self.cmd('Ota {}'.format(image_size), wait=3)
+        logging.debug('Result of Ota command' + str(results))
+        if any(OTA_VERIFICATION_FAILED in result for result in results[0]):
+            return False
+        # finished cmd Ota
+        if (any('OTA_ERASE_PARTITION' in result.values() for result in
+                results[0] if
+                isinstance(result, dict)) or
+                any('OTA erasd ptns' in result for result in results[0])):
+            try:
+                # -B: buffer size in bytes, -L rate-limit in B/s.
+                subcmd = ('pv --force -B 160 -L 10000 %s > %s' %
+                          (file_name, self.commander_port))
+                logging.info(subcmd)
+                p = subprocess.Popen(subcmd, stdout=subprocess.PIPE, shell=True)
+            except OSError:
+                logging.exception(
+                    'pv not installed, please install by: apt-get install pv')
+                return False
+            try:
+                res = self.read_serial_port(read_until=6)
+            except DeviceError:
+                logging.exception('Unable to read the device port')
+                return False
+            for line in res:
+                if isinstance(line, dict):
+                    logging.info(line)
+                else:
+                    match = re.search(OTA_RECEIVE_CSR_REGEX, line)
+                    if match:
+                        logging.info(
+                            'OTA Image received. Transfer is in progress...')
+                        # Polling during a transfer could miss the final message
+                        # when the device reboots, so we wait until the transfer
+                        # completes.
+                        p.wait()
+                        return True
+            # No image transfer in progress.
+            return False
+        else:
+            return False
+
+    def flash_from_file(self, file_name, reconnect=True):
+        """Upgrade Apollo from an image file.
+
+        Args:
+            file_name: DFU file name. eg. /google/data/ro/teams/wearables/
+                       apollo/ota/master/v76/apollo.dfu
+            reconnect: True to reconnect the device after flashing
+        Returns:
+            Bool: True if the upgrade is successful. False otherwise.
+        """
+        logging.debug('Inside flash_from_file()...')
+        if self.write_to_flash(file_name):
+            logging.info('OTA image transfer is completed')
+            if reconnect:
+                # Transfer is completed; waiting for the device to reboot.
+                logging.info('wait to make sure old connection disappears.')
+                self.wait_for_reset(timeout=150)
+                self.reconnect()
+                logging.info('BudsDevice reboots successfully after OTA.')
+            return True
+
+    def open_mic(self, post_delay=5):
+        """Open Microphone on the device using EvtHex command.
+
+        Args:
+            post_delay: time delay in seconds after the microphone is opened.
+
+        Returns:
+            Returns True or False based on whether the command was executed.
+        """
+        logging.debug('Inside open_mic()...')
+        success, _ = self._cmd('Voicecmd 1', post_delay)
+        return success
+
+    def close_mic(self, post_delay=5):
+        """Close Microphone on the device using EvtHex command.
+
+        Args:
+            post_delay: time delay in seconds after the microphone is closed.
+
+        Returns:
+            Returns true or false based on whether the command was executed.
+        """
+        logging.debug('Inside close_mic()...')
+        success, _ = self._cmd('Voicecmd 0', post_delay)
+        return success
+
+    def touch_key_press_event(self, wait=1):
+        """send key press event command.
+
+        Args:
+            wait: Inject delay after key press to simulate real touch event .
+        """
+        logging.debug('Inside KeyPress()...')
+        self._cmd('Touch 6')
+        self.wait(wait)
+
+    def touch_tap_event(self, wait_if_pause=10):
+        """send key release event after key press to simulate single tap.
+
+        Args:
+            wait_if_pause: Inject delay after avrcp pause was detected.
+
+        Returns:
+            Returns False if avrcp play orp ause not detected else True.
+        """
+        logging.debug('Inside Touch Tap event()...')
+        self._cmd('Touch 4')
+        for line in self.cmd_log:
+            if 'avrcp play' in line:
+                logging.info('avrcp play detected')
+                return True
+            if 'avrcp pause' in line:
+                logging.info('avrcp pause detected')
+                self.wait(wait_if_pause)
+                return True
+        return False
+
+    def touch_hold_up_event(self):
+        """Open Microphone on the device using touch hold up command.
+
+        Returns:
+            Returns True or False based on whether the command was executed.
+        """
+        logging.debug('Inside open_mic()...')
+        self._cmd('Touch 3')
+        for line in self.cmd_log:
+            if 'Button 1 LONG_BEGIN' in line:
+                logging.info('mic open success')
+                return True
+        return False
+
+    def touch_hold_down_event(self):
+        """Close Microphone on the device using touch hold down command.
+
+        Returns:
+            Returns true or false based on whether the command was executed.
+        """
+        logging.debug('Inside close_mic()...')
+        self._cmd('Touch 8')
+        for line in self.cmd_log:
+            if 'Button 1 LONG_END' in line:
+                logging.info('mic close success')
+                return True
+        return False
+
+    def tap(self):
+        """Performs a Tap gesture."""
+        logging.debug('Inside tap()')
+        self.cmd('ButtonTap 0')
+
+    def hold(self, duration):
+        """Tap and hold a button.
+
+        Args:
+            duration: (int) duration in milliseconds.
+        """
+        logging.debug('Inside hold()')
+        self.cmd('ButtonHold ' + str(duration))
+
+    def swipe(self, direction):
+        """Perform a swipe gesture.
+
+        Args:
+            direction: (int) swipe direction 1 forward, 0 backward.
+        """
+        logging.debug('Inside swipe()')
+        self.cmd('ButtonSwipe ' + direction)
+
+    def get_pskey(self, key):
+        """Fetch value from persistent store."""
+        try:
+            cmd = 'GetPSHex ' + apollo_sink_events.PSKEY[key]
+        except KeyError:
+            raise DeviceError('PS Key: %s not found' % key)
+        pskey = ''
+        try:
+            ret = self.cmd(cmd)
+            for result in ret[0]:
+                if not re.search(r'pskey', result.lower()) and LOG_REGEX.match(
+                        result):
+                    # values are broken into words separated by spaces.
+                    pskey += LOG_REGEX.match(result).group('msg').replace(' ',
+                                                                          '')
+                else:
+                    continue
+        except DeviceError:
+            logging.exception('GetPSHex cmd failed')
+        return pskey
+
+    def get_version(self):
+        """Return a device version information.
+
+        Note: Version information is obtained from the firmware loader. Old
+        information is lost when firmware is updated.
+        Returns:
+            A dictionary of device version info. eg.
+            {
+                'Fw Build': '73',
+                'OTA Status': 'No OTA performed before this boot',
+            }
+
+        """
+        logging.debug('Inside get_version()...')
+        success, result = self._cmd('GetVer', throw_error=False)
+        status = {}
+        if result:
+            for line in result:
+                if isinstance(line, dict):
+                    status['build'] = line['vm_build_number']
+                    status['psoc_build'] = line['psoc_version']
+                    status['debug'] = line['csr_fw_debug_build']
+                    status['Fw Build Label'] = line['build_label']
+                    if 'last_ota_status' in line.keys():
+                        # Optional value in the proto response
+                        status['OTA Status'] = line['last_ota_status']
+                    else:
+                        status['OTA Status'] = 'No info'
+        return success, status
+
+    def get_earcon_version(self):
+        """Return a device Earson version information.
+
+        Returns:
+            Boolean:  True if success, False otherwise.
+            String: Earon Version e.g. 7001 0201 6100 0000
+
+        """
+        # TODO(nviboonchan): Earcon version format would be changed in the
+        # future.
+        logging.debug('Inside get_earcon_version()...')
+        result = self.get_pskey('PSKEY_EARCON_VERSION')
+        if result:
+            return True, result
+        else:
+            return False, None
+
+    def get_bt_status(self):
+        """Return a device bluetooth connection information.
+
+        Returns:
+            A dictionary of bluetooth status. eg.
+            {
+                'Comp. App': 'FALSE',
+               'HFP (pri.)', 'FALSE',
+               'HFP (sec.)': 'FALSE',
+               'A2DP (pri.)': 'FALSE',
+               'A2DP (sec.)': 'FALSE',
+               'A2DP disconnects': '3',
+               'A2DP Role (pri.)': 'slave',
+               'A2DP RSSI (pri.)': '-Touch'
+            }
+        """
+        logging.debug('Inside get_bt_status()...')
+        return self._get_status('GetBTStatus')
+
+    def get_conn_devices(self):
+        """Gets the BT connected devices.
+
+        Returns:
+            A dictionary of BT connected devices. eg.
+            {
+                'HFP Pri': 'xxxx',
+                'HFP Sec': 'xxxx',
+                'A2DP Pri': 'xxxx',
+                'A2DP Sec': 'xxxx',
+                'RFCOMM devices': 'xxxx',
+                'CTRL': 'xxxx',
+                'AUDIO': 'None',
+                'DEBUG': 'None',
+                'TRANS': 'None'
+             }
+
+        Raises:
+            ResponseError: If unexpected response occurs.
+        """
+        response_regex = re.compile('[0-9]+ .+: ')
+        connected_status = {}
+        response = self.cmd('GetConnDevices')
+        if not response:
+            raise ResponseError(
+                'No response returned by GetConnDevices command')
+        for line in response[0]:
+            if response_regex.search(line):
+                profile, value = line[line.find(' '):].split(':', 1)
+                connected_status[profile] = value
+        if not connected_status:
+            raise ResponseError('No BT Profile Status in response.')
+        return connected_status
+
+    def _get_status(self, cmd):
+        """Return a device status information."""
+        status = {}
+        try:
+            results = self.cmd(cmd)
+        except DeviceError as ex:
+            # logging.exception('{} cmd failed'.format(cmd))
+            logging.warning('Failed to get device status info.')
+            raise ex
+        results = results[0]
+        for result in results:
+            match = re.match(STATUS_REGEX, result)
+            if match:
+                key = match.group('key')
+                value = match.group('value')
+                status.update({key: value})
+        return status
+
+    def is_streaming(self):
+        """Returns the music streaming status on Apollo.
+
+        Returns:
+            Boolean: True if device is streaming music. False otherwise.
+        """
+
+        status = self.cmd('GetDSPStatus')
+        if any('active feature mask: 0' in log for log in
+               status[0]):
+            return False
+        elif any('active feature mask: 2' in log for log in
+                 status[0]):
+            return True
+        else:
+            return False
+
+    def is_in_call(self):
+        """Returns the phone call status on Apollo.
+
+        Returns:
+            Boolean: True if device has incoming call. False otherwise.
+        """
+
+        status = self.cmd('GetDSPStatus')
+        if not any('Inc' or 'out' in log for log in status[0]):
+            return False
+        return True
+
+    def is_device_limbo(self):
+        """Check if device is in Limbo state.
+
+        Returns:
+            Boolean: True if device is in limbo state, False otherwise.
+        """
+        device_state = self.get_device_state()
+        logging.info('BudsDevice "{}" state {}'.format(self.serial_number,
+                                                       device_state))
+        return device_state == 'limbo'
+
+    def get_device_state(self):
+        """Get state of the device.
+
+        Returns:
+            String representing the device state.
+
+        Raises:
+            DeviceError: If command fails.
+        """
+        _, status = self._cmd('GetDSPStatus')
+        for stat in status:
+            if isinstance(stat, dict):
+                logging.info(stat)
+                return stat['sink_state'].lower()
+        raise DeviceError('BudsDevice state not found in GetDSPStatus.')
+
+    def set_stay_connected(self, value):
+        """Run command to set the value for SetAlwaysConnected.
+
+        Args:
+            value: (int) 1 to keep connection engages at all time,
+                         0 for restoring
+        Returns:
+            the set state of type int (0 or 1) or None if not applicable
+        """
+
+        if int(self.version) >= 1663:
+            self._cmd('SetAlwaysConnected {}'.format(value))
+            logging.info('Setting sleep on idle to {}'.format(value))
+            return value
+
+    def get_codec(self):
+        """Get device's current audio codec.
+
+        Returns:
+            String representing the audio codec.
+
+        Raises:
+            DeviceError: If command fails.
+        """
+        success, status = self._cmd('get_codec')
+        logging.info('---------------------------------------')
+        logging.info(status)
+        logging.info('---------------------------------------')
+        if success:
+            for line in status:
+                if isinstance(line, dict):
+                    logging.info('Codec found: %s'.format(line['codec']))
+                    return line['codec']
+        raise DeviceError('BudsDevice state not found in get_codec.')
+
+    def crash_dump_detection(self):
+        """Reads crash dump determines if a crash is detected.
+
+        Returns:
+            True if crash detection is supported and if a new crash is found.
+            False otherwise.
+        """
+        # Detects if crashdump output is new
+        new_crash_regex = r'new crash = ([01]+)'
+        # filter crashdump for just the trace
+        crash_stack_regex = r'BASIC(.*)\n[\d]+ APP_STACK(.*)\n'
+        # remove time stamp commander output
+        timestamp_remover_regex = '\n[\\d]+ '
+
+        logging.debug('Inside IsCrashDumpDetection()...')
+        cmd_return = self.cmd('CrashDump', wait=1)
+        crash_dump_str = '\n'.join(cmd_return[0])
+        logging.info(crash_dump_str)
+        try:
+            # check for crash
+            match = re.search(new_crash_regex, crash_dump_str)
+            if match is not None:
+                if match.groups()[0] == '1':  # new crash found
+                    logging.error('Crash detected!!')
+                    basic, app_stack = re.search(crash_stack_regex,
+                                                 crash_dump_str,
+                                                 re.DOTALL).groups()
+                    # remove time stamps from capture
+                    basic = re.sub(timestamp_remover_regex, '', basic)
+                    app_stack = re.sub(timestamp_remover_regex, '', app_stack)
+                    # write to log
+                    # pylint: disable=bad-whitespace
+                    logging.info(
+                        '\n&270d = %s\n&270e = %s\n' % (basic, app_stack))
+                    # pylint: enable=bad-whitespace
+                    return True
+                else:  # no new crash
+                    logging.info('No crash detected')
+                    return False
+        except AttributeError:
+            logging.exception(
+                'Apollo crash dump output is not in expected format')
+            raise DeviceError('Apollo crash dump not in expected format')
+
+    @property
+    def version(self):
+        """Application version.
+
+        Returns:
+            (String) Firmware version.
+        """
+        _, result = self.get_version()
+        return result['build']
+
+    @property
+    def bluetooth_address(self):
+        """Bluetooth MAC address.
+
+        Returns:
+            a string representing 48bit BT MAC address in Hex.
+
+        Raises:
+            DeviceError: Unable to find BT Address
+        """
+        results = self.get_pskey('PSKEY_BDADDR')
+        if not results:
+            raise DeviceError('Unable to find BT Address')
+        logging.info(results)
+        # Bluetooth lower address part, upper address part and non-significant
+        # address part.
+        bt_lap = results[2:8]
+        bt_uap = results[10:12]
+        bt_nap = results[12:16]
+        results = bt_nap + bt_uap + bt_lap
+
+        return ':'.join(map(''.join, zip(*[iter(results)] * 2))).upper()
+
+    @property
+    def device_name(self):
+        """Device Friendly Name.
+
+        Returns:
+            a string representing device friendly name.
+
+        Raises:
+            DeviceError: Unable to find a wearable device name.
+        """
+        result = self.get_pskey('PSKEY_DEVICE_NAME')
+        if not result:
+            raise DeviceError('Unable to find BudsDevice Name')
+        logging.info(_to_ascii(result))
+        return _to_ascii(result)
+
    @property
    def stay_connected(self):
        # Cached value of the last applied SetAlwaysConnected setting; set by
        # the setter below via set_stay_connected().
        return self.stay_connected_state

    @stay_connected.setter
    def stay_connected(self, value):
        # Push the new value to the device and cache what was actually applied
        # (set_stay_connected may return None on unsupported firmware).
        self.stay_connected_state = self.set_stay_connected(value)
+
+    def read_serial_port(self, read_until=None):
+        """Read serial port until specified read_until value in seconds."""
+        # use default read_until value if not specified
+        if read_until:
+            time.sleep(read_until)
+        res = self.connection_handle.read()
+        buf_read = []
+        for line in res:
+            if apollo_log_decoder.is_automation_protobuf(line):
+                decoded = apollo_log_decoder.decode(line)
+                buf_read.append(decoded)
+            else:
+                buf_read.append(line)
+        return buf_read
+
+    def wait_for_reset(self, timeout=30):
+        """waits for the device to reset by check serial enumeration.
+
+        Checks every .5 seconds for the port.
+
+        Args:
+            timeout: The max time to wait for the device to disappear.
+
+        Returns:
+            Bool: True if the device reset was detected. False if not.
+        """
+        start_time = time.time()
+        while True:
+            res = subprocess.Popen(['ls', self.commander_port],
+                                   stdout=subprocess.PIPE,
+                                   stderr=subprocess.PIPE)
+            res.communicate()
+            if res.returncode != 0:
+                logging.info('BudsDevice reset detected')
+                return True
+            elif (time.time() - start_time) > timeout:
+                logging.info('Timeout waiting for device to reset.....')
+                return False
+            else:
+                time.sleep(.5)
+
+    def set_in_case(self, reconnect=True):
+        """Simulates setting apollo in case and wait for device to come up.
+
+        Args:
+            reconnect: bool - if method should block until reconnect
+        """
+        logging.info('Setting device in case')
+        out = self.send('Pow 2')
+        for i in out:
+            if 'No OTA wakeup condition' in i:
+                logging.info('No wake up condition.')
+            elif 'STM Wakeup 10s' in i:
+                logging.info('Wake up condition detected.')
+        if reconnect:
+            self.wait_for_reset()
+            self.reconnect()
+
+
class ParentDevice(BudsDevice):
    """Wrapper object for Device that addresses b10 recovery and build flashing.

    Recovery mechanism:
    In case a serial connection could not be established to b10, the recovery
    mechanism is activated ONLY if 'recover_device' is set to 'true' and
    b29_serial is defined in config file. This helps recover a device that has
    a bad build installed.
    """

    def __init__(self, serial_number, recover_device=False, b29_serial=None):
        # If the recover_device parameter is supplied and there is an error in
        # instantiating B10, try to recover the device. Instantiating b10 has
        # to fail at most $tries_before_recovery times before initiating a
        # recovery; run the recovery at most $recovery_tries times before
        # raising Error. After the first recovery attempt failure, reset b29
        # on each iteration.
        self.b29_device = None
        if recover_device:
            if b29_serial is None:
                logging.error('B29 serial not defined')
                raise Error(
                    'Recovery failed because "b29_serial" definition not '
                    'present in device manifest file')
            else:
                self.b29_device = B29Device(b29_serial)
            tries_before_recovery = 5
            recovery_tries = 5
            for attempt in range(tries_before_recovery):
                try:
                    # Build crash symptoms vary based on the nature of the
                    # crash: ConnectError is thrown if the device never shows
                    # up in /dev/; sometimes the device shows up and can
                    # connect but sending commands fails or crashes apollo,
                    # in which case DeviceError is thrown.
                    super().__init__(serial_number, commander_port=None,
                                     log_port=None, serial_logger=None)
                    break
                except (ConnectError, DeviceError):
                    logging.warning(
                        'Error initializing apollo object - # of attempt '
                        'left : %d' % (tries_before_recovery - attempt - 1))
                    if attempt + 1 >= tries_before_recovery:
                        logging.error(
                            'Retries exhausted - now attempting to restore '
                            'golden image')
                        for recovery_attempt in range(recovery_tries):
                            if not self.b29_device.restore_golden_image():
                                logging.error('Recovery failed - retrying...')
                                self.b29_device.reset_charger()
                                continue
                            # Golden image restored; try to instantiate now.
                            try:
                                super().__init__(serial_number,
                                                 commander_port=None,
                                                 log_port=None,
                                                 serial_logger=None)
                                break
                            except (ConnectError, DeviceError):
                                if recovery_attempt == recovery_tries - 1:
                                    raise Error(
                                        'Recovery failed - ensure that there '
                                        'is no mismatching serial numbers of '
                                        'b29 and b10 is specified in config')
                                else:
                                    logging.warning(
                                        'Recovery attempt failed - retrying...')
                    time.sleep(2)
        else:
            super().__init__(serial_number, commander_port=None, log_port=None,
                             serial_logger=None)
        # Set this to prevent sleep.
        self.set_stay_connected(1)

    def get_info(self):
        """Collect device identity/version info into a flat dictionary."""
        information_dictionary = {}
        information_dictionary['type'] = self.dut_type
        information_dictionary['serial'] = self.serial_number
        information_dictionary['log port'] = self.log_port
        information_dictionary['command port'] = self.commander_port
        information_dictionary['bluetooth address'] = self.bluetooth_address
        success, build_dict = self.get_version()
        information_dictionary['build'] = build_dict
        # Extract the build number as a separate key. Useful for BigQuery.
        information_dictionary['firmware build number'] = build_dict.get(
            'build', '9999')
        information_dictionary['name'] = self.device_name
        if self.b29_device:
            information_dictionary['b29 serial'] = self.b29_device.serial
            information_dictionary['b29 firmware'] = self.b29_device.fw_version
            information_dictionary['b29 commander port'] = self.b29_device.port
            information_dictionary[
                'b29 app version'] = self.b29_device.app_version
        return information_dictionary

    def setup(self, **kwargs):
        """Optionally flash the device before running tests.

        Args:
            apollo_build: if specified, will be used in flashing the device to
                          that build prior to running any of the tests. If not
                          specified flashing is skipped.
        """
        if 'apollo_build' in kwargs and kwargs['apollo_build'] is not None:
            build = kwargs['apollo_build']
            x20_regex = re.compile(r'/google/data/')
            if not os.path.exists(build) or os.stat(build).st_size == 0:
                # If this is an x20 path, retry on file-not-found error or if
                # file size is zero, because x20 paths do not update
                # immediately.
                if x20_regex.match(build):
                    for i in range(20):
                        # Wait until the file exists and size is > 0, with a
                        # 6-second interval between retries.
                        if os.path.exists(build) and os.stat(build).st_size > 0:
                            break
                        if i == 19:
                            logging.error('Build path (%s) does not exist or '
                                          'file size is 0 - aborted' % build)
                            raise Error('Specified build path (%s) does not '
                                        'exist or file size is 0' % build)
                        logging.warning('Build path (%s) does not exist or '
                                        'file size is 0 - retrying...' % build)
                        time.sleep(6)
                else:
                    raise Error('Specified build path (%s) does not exist or '
                                'file size is 0' % build)
            # Bug fix: the flash call used to live inside the
            # "not os.path.exists" branch, so a build file that was already
            # present (the common case) was silently never flashed.
            self.flash_from_file(file_name=build, reconnect=True)
        else:
            logging.info('Not flashing apollo.')

    def teardown(self, **kwargs):
        """Release the device connection at the end of a test run."""
        self.close()
+
+
def _evt_hex(cmd):
    """Translate a sink event name into an 'EvtHex <code>' command string."""
    return 'EvtHex %s' % apollo_sink_events.SINK_EVENTS[cmd]
+
+
def _to_ascii(orig):
    """Decode a byte-swapped hex string to text, dropping a trailing 00."""
    # Returned value needs to be byte swapped; remove the last octet if 0.
    swapped = _byte_swap(orig)
    if swapped.endswith('00'):
        swapped = swapped[:-2]
    return bytearray.fromhex(swapped).decode()
+
+
+def _byte_swap(orig):
+    """Simple function to swap bytes order.
+
+    Args:
+        orig: original string
+
+    Returns:
+        a string with bytes swapped.
+        eg. orig = '6557276920736952006f'.
+        After swap, return '57656927732052696f00'
+    """
+    return ''.join(
+        sum([(c, d, a, b) for a, b, c, d in zip(*[iter(orig)] * 4)], ()))
diff --git a/acts/framework/acts/controllers/buds_lib/b29_lib.py b/acts/framework/acts/controllers/buds_lib/b29_lib.py
new file mode 100644
index 0000000..bb4ea7d
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/b29_lib.py
@@ -0,0 +1,221 @@
+#!/usr/bin/env python3
+#
+#   Copyright 2018 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+"""
+Class definition of B29 device for controlling the device.
+
+B29 is an engineering device with serial capabilities. It is almost like
+b20 except it has additional features that allow sending commands
+to b10 via one-wire and to pull logs from b10 via one-wire.
+
+Please see https://docs.google.com/document/d/17yJeJRNWxv5E9
+fBvw0sXkgwCBkshU_l4SxWkKgAxVmk/edit for details about available operations.
+"""
+
+import os
+import re
+import time
+
+from acts import tracelogger
+from acts import utils
+from logging import Logger
+
+logging = tracelogger.TakoTraceLogger(Logger(__file__))
+DEVICE_REGEX = (
+    r'_(?P<device_serial>[A-Z0-9]+)-(?P<interface>\w+)\s->\s'
+    r'(\.\./){2}(?P<port>\w+)'
+)
+# TODO: automate getting the latest version from x20
+DEBUG_BRIDGE = ('/google/data/ro/teams/wearables/apollo/ota/jenkins-presubmit/'
+                'ovyalov/master/apollo-sw/CL14060_v2-build13686/v13686/'
+                'automation/apollo_debug_bridge/linux2/apollo_debug_bridge')
+B29_CHIP = 'Cypress_Semiconductor_USBUART'
+
+
+# TODO:
+# as the need arises, additional functionalities of debug_bridge should be
+# integrated
+# TODO:
+# https://docs.google.com/document/d/17yJeJRNWxv5E9fBvw0sXkgwCBkshU_
+# l4SxWkKgAxVmk/edit
+
+class B29Error(Exception):
+    """Module Level Error."""
+
+
def get_b29_devices():
    """ Get all available B29 devices.

    Lists /dev/serial/by-id entries matching the B29 USB-UART chip name and
    parses the `ls -l` symlink output for the serial number and tty port.

    Returns:
      (list) A list of available devices as dicts with 'commander_port',
      'log_port' and 'serial_number' keys (ex: commander_port
      '/dev/ttyACM4'), or empty list if none found.
    """
    devices = []
    result = os.popen('ls -l /dev/serial/by-id/*%s*' % B29_CHIP).read()
    for line in result.splitlines():
        match = re.search(DEVICE_REGEX, line)
        if not match:
            # `ls` error text or an unexpected entry: skip instead of
            # raising AttributeError on a None match.
            continue
        device_serial = match.group('device_serial')
        # No dedicated log port is exposed for B29.
        log_port = None
        commander_port = '/dev/' + match.group('port')
        devices.append({
            'commander_port': commander_port,
            'log_port': log_port,
            'serial_number': device_serial
        })
    return devices
+
+
class B29Device(object):
    """Class to control a B29 device via the apollo debug bridge binary."""

    def __init__(self, b29_serial):
        """Locates the commander port for the serial and reads versions.

        Args:
            b29_serial: String serial number (ex: 'D96045152F121B00').

        Raises:
            B29Error: if no attached B29 matches the given serial number.
        """
        self.serial = b29_serial
        ports = [d['commander_port'] for d in get_b29_devices() if
                 d['serial_number'] == b29_serial]
        if not ports:
            logging.error("unable to find b29 with serial number %s" %
                          b29_serial)
            raise B29Error(
                "Recovery failed because b29_serial specified in device "
                "manifest file is not found or invalid")
        self.port = ports[0]
        # Regexes that match a successful ping reply (at least one packet
        # received) for each pingable component.
        self.ping_match = {'psoc': r'Pings: tx=[\d]* rx=[1-9][0-9]',
                           'csr': r'count=100, sent=[\d]*, received=[1-9][0-9]',
                           'charger': r'Pings: tx=[\d]* rx=[1-9][0-9]'}
        self.fw_version = self._get_version('fw')
        self.app_version = self._get_version('app')

    def _get_version(self, type='fw'):
        """ Method to get version of B29.

        Args:
            type: 'fw' for the charger firmware version, 'app' for the app
                version. ('type' shadows the builtin; name kept for
                interface compatibility.)

        Returns:
            String version if found (ex: '0006'), None otherwise

        Raises:
            B29Error: if type is neither 'fw' nor 'app'.
        """
        command = '--serial={}'.format(self.port)
        debug_bridge_process = self._send_command(command=command)
        if type == 'fw':
            version_match = re.compile(r'CHARGER app version: version=([\d]*)')
        elif type == 'app':
            version_match = re.compile(r'APP VERSION: ([\d]*)')
        else:
            # Fail fast: previously an unknown type left version_match
            # unbound and raised UnboundLocalError below.
            debug_bridge_process.kill()
            raise B29Error('unknown version type: %s' % type)
        version_str = self._parse_output_of_running_process(
            debug_bridge_process, version_match)
        debug_bridge_process.kill()
        if not version_str:
            return None
        return version_match.search(version_str).groups()[0]

    def _parse_output_of_running_process(self, subprocess, match, timeout=30):
        """ Polls the subprocess's stderr for a line matching `match`.

        Args:
            subprocess: object returned by _send_command (which is the same
                as the object returned by subprocess.Popen()).
            match: regex pattern to look for - either a string or the object
                returned by re.compile(r'<regex>') (re.compile accepts both).
            timeout: int - seconds to keep retrying before bailing.

        Returns:
            The matching output line, or False if no match within timeout.
        """
        deadline = time.time() + timeout
        success_match = re.compile(match)
        while time.time() < deadline:
            out = subprocess.stderr.readline()
            if isinstance(out, bytes):
                # NOTE(review): stderr may be a byte stream depending on how
                # the bridge subprocess is started; decode so the str
                # pattern search cannot raise TypeError. Confirm against
                # utils.start_standing_subprocess.
                out = out.decode(errors='replace')
            if success_match.search(out):
                return out
            time.sleep(.5)
        return False

    def _send_command(self, command):
        """ Send command to b29 using apollo debug bridge.

        Args:
          command: The argument string for the apollo debug bridge to
              execute.

        Returns:
          subprocess object
        """
        return utils.start_standing_subprocess(
            '{} {} {}'.format(DEBUG_BRIDGE, '--rpc_port=-1', command),
            shell=True)

    def restore_golden_image(self):
        """ Restores the 'golden image' of the b10 attached to this b29.

        Calls the debug-bridge executable with options that restore the
        golden image available in b10 partition 8.

        Returns:
            True if the DFU was initiated successfully, False otherwise.
        """
        # TODO:
        # because we are accessing x20, we need to capture error resulting
        # from expired prodaccess and report it explicitly
        # TODO:
        # possibly file not found error?
        logging.info('Restoring golden image...')
        command = '--serial=%s --debug_spi=dfu --sqif_partition=8' % self.port
        debug_bridge_process = self._send_command(command=command)
        success_match = re.compile('DFU on partition #8 successfully initiated')
        matched = self._parse_output_of_running_process(debug_bridge_process,
                                                        success_match)
        # Always clean up the bridge process, success or not.
        debug_bridge_process.kill()
        if matched:
            logging.info('Golden image restored successfully')
            return True
        logging.warning('Failed to restore golden image')
        return False

    def ping_component(self, component, timeout=30):
        """ Send ping to the specified component via B29.

        Args:
            component: 'csr' or 'psoc' or 'charger'
            timeout: int - seconds to wait for a successful ping reply.

        Returns:
            True if successful and False otherwise

        Raises:
            B29Error: if component is not one of the supported names.
        """
        if component not in ('csr', 'psoc', 'charger'):
            raise B29Error('specified parameter for component is not valid')
        logging.info('Pinging %s via B29...' % component)
        command = '--serial={} --ping={}'.format(self.port, component)
        debug_bridge_process = self._send_command(command=command)
        matched = self._parse_output_of_running_process(
            debug_bridge_process, self.ping_match[component], timeout)
        debug_bridge_process.kill()
        if matched:
            logging.info('Ping passes')
            return True
        logging.warning('Ping failed')
        return False

    def reset_charger(self):
        """ Send reset command to B29.

        Only firmware versions >= 6 support the --charger_reset argument.

        Raises: TimeoutError (lib.utils.TimeoutError) if the device does not
        come back within 120 seconds
        """
        if int(self.fw_version) >= 6:
            logging.info('Resetting B29')
            command = '--serial={} --charger_reset'.format(self.port)
            reset_charger_process = self._send_command(command=command)
            # Give the command a moment to be issued before killing the
            # bridge process.
            time.sleep(2)
            reset_charger_process.kill()
            logging.info('Waiting for B29 to become available..')
            utils.wait_until(lambda: self.ping_component('charger'), 120)
        else:
            # NOTE(review): message says 'B20' -- possibly should read
            # 'B29'; confirm before changing the log text.
            logging.warning('B20 firmware version %s does not support '
                            'charger_reset argument' % self.fw_version)
diff --git a/acts/framework/acts/controllers/buds_lib/data_storage/__init__.py b/acts/framework/acts/controllers/buds_lib/data_storage/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/data_storage/__init__.py
diff --git a/acts/framework/acts/controllers/buds_lib/data_storage/_sponge/SimpleXMLWriter.py b/acts/framework/acts/controllers/buds_lib/data_storage/_sponge/SimpleXMLWriter.py
new file mode 100644
index 0000000..bb31631
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/data_storage/_sponge/SimpleXMLWriter.py
@@ -0,0 +1,305 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+#
+# SimpleXMLWriter
+# $Id: SimpleXMLWriter.py 3265 2007-09-06 20:42:00Z fredrik $
+#
+# a simple XML writer
+#
+# history:
+# 2001-12-28 fl   created
+# 2002-11-25 fl   fixed attribute encoding
+# 2002-12-02 fl   minor fixes for 1.5.2
+# 2004-06-17 fl   added pythondoc markup
+# 2004-07-23 fl   added flush method (from Jay Graves)
+# 2004-10-03 fl   added declaration method
+#
+# Copyright (c) 2001-2004 by Fredrik Lundh
+#
+# fredrik@pythonware.com
+# http://www.pythonware.com
+#
+# --------------------------------------------------------------------
+# The SimpleXMLWriter module is
+#
+# Copyright (c) 2001-2004 by Fredrik Lundh
+#
+# By obtaining, using, and/or copying this software and/or its
+# associated documentation, you agree that you have read, understood,
+# and will comply with the following terms and conditions:
+#
+# Permission to use, copy, modify, and distribute this software and
+# its associated documentation for any purpose and without fee is
+# hereby granted, provided that the above copyright notice appears in
+# all copies, and that both that copyright notice and this permission
+# notice appear in supporting documentation, and that the name of
+# Secret Labs AB or the author not be used in advertising or publicity
+# pertaining to distribution of the software without specific, written
+# prior permission.
+#
+# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
+# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
+# ABILITY AND FITNESS.  IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
+# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
+# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
+# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
+# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
+# OF THIS SOFTWARE.
+# --------------------------------------------------------------------
+
+##
+# Tools to write XML files, without having to deal with encoding
+# issues, well-formedness, etc.
+# <p>
+# The current version does not provide built-in support for
+# namespaces. To create files using namespaces, you have to provide
+# "xmlns" attributes and explicitly add prefixes to tags and
+# attributes.
+#
+# <h3>Patterns</h3>
+#
+# The following example generates a small XHTML document.
+# <pre>
+#
+# from elementtree.SimpleXMLWriter import XMLWriter
+# import sys
+#
+# w = XMLWriter(sys.stdout)
+#
+# html = w.start("html")
+#
+# w.start("head")
+# w.element("title", "my document")
+# w.element("meta", name="generator", value="my application 1.0")
+# w.end()
+#
+# w.start("body")
+# w.element("h1", "this is a heading")
+# w.element("p", "this is a paragraph")
+#
+# w.start("p")
+# w.data("this is ")
+# w.element("b", "bold")
+# w.data(" and ")
+# w.element("i", "italic")
+# w.data(".")
+# w.end("p")
+#
+# w.close(html)
+# </pre>
+##
+
+import re, sys, string
+
# Python version probe: the unicode() builtin exists only on Python 2, so
# on Python 3 (and on ancient 1.5.2) the NameError branch runs: encode()
# becomes a no-op and _escape only matches 8-bit characters.
# NOTE(review): on Python 3 every str is unicode, so the narrow 1.5.2
# pattern under-escapes characters above \xff -- confirm whether the wide
# (else-branch) pattern was intended there.
try:
    unicode("")
except NameError:

    def encode(s, encoding):
        # 1.5.2: application must use the right encoding
        return s

    _escape = re.compile(r"[&<>\"\x80-\xff]+")  # 1.5.2
else:

    def encode(s, encoding):
        # Python 2: encode the unicode string to the requested byte encoding.
        return s.encode(encoding)

    # eval() is used because u"..." literals are a syntax error on 1.5.2.
    _escape = re.compile(eval(r'u"[&<>\"\u0080-\uffff]+"'))
+
+
def encode_entity(text, pattern=_escape):
    """Map characters matched by *pattern* to numerical XML entities.

    Reserved and non-ascii characters are replaced by &#NNN; references,
    then the result is encoded to ascii via the module-level encode().
    """

    def escape_entities(m):
        # ''.join: string.join() was removed in Python 3.
        return ''.join('&#%d;' % ord(char) for char in m.group())

    return encode(pattern.sub(escape_entities, text), 'ascii')


# _escape is baked into encode_entity's default argument above; drop the
# module-level name so nothing else can depend on it.
del _escape
+
+#
+# the following functions assume an ascii-compatible encoding
+# (or "utf-16")
+
+
def escape_cdata(s, encoding=None):
    """Escape &, < and > for use as XML character data.

    If an encoding is given, the escaped text is encoded with it, falling
    back to numerical entities when the encoding cannot represent it.
    """
    # '&' must be replaced first so it does not re-escape the entities.
    for raw, entity in (('&', '&amp;'), ('<', '&lt;'), ('>', '&gt;')):
        s = s.replace(raw, entity)
    if not encoding:
        return s
    try:
        return encode(s, encoding)
    except UnicodeError:
        return encode_entity(s)
+
+
def escape_attrib(s, encoding=None):
    """Escape &, quotes, < and > for use inside an XML attribute value.

    If an encoding is given, the escaped text is encoded with it, falling
    back to numerical entities when the encoding cannot represent it.
    """
    # '&' must be replaced first so it does not re-escape the entities.
    for raw, entity in (('&', '&amp;'), ("'", '&apos;'), ('"', '&quot;'),
                        ('<', '&lt;'), ('>', '&gt;')):
        s = s.replace(raw, entity)
    if not encoding:
        return s
    try:
        return encode(s, encoding)
    except UnicodeError:
        return encode_entity(s)
+
+
+##
+# XML writer class.
+#
+# @param file A file or file-like object.  This object must implement
+#    a <b>write</b> method that takes an 8-bit string.
+# @param encoding Optional encoding.
+
+
class XMLWriter:
    """Writes well-formed XML to a stream, handling escaping and nesting.

    @param file A file or file-like object with a write(str) method, or a
        path that will be opened for writing.
    @param encoding Optional output encoding (default "us-ascii").
    """

    def __init__(self, file, encoding="us-ascii"):
        if not hasattr(file, "write"):
            file = open(file, "w")
        self.__write = file.write
        if hasattr(file, "flush"):
            # Shadow the no-op flush() method with the stream's own flush.
            self.flush = file.flush
        self.__open = 0  # true if start tag is open
        self.__tags = []  # stack of currently open tags
        self.__data = []  # buffered character data
        self.__encoding = encoding

    def __flush(self):
        # Flush internal buffers: close a pending start tag and write out
        # any buffered character data.
        if self.__open:
            self.__write(">")
            self.__open = 0
        if self.__data:
            # ''.join: string.join() was removed in Python 3.
            data = "".join(self.__data)
            self.__write(escape_cdata(data, self.__encoding))
            self.__data = []

    def declaration(self):
        """Writes an XML declaration, including the encoding when it is
        neither us-ascii nor utf-8."""
        encoding = self.__encoding
        if encoding == "us-ascii" or encoding == "utf-8":
            self.__write("<?xml version='1.0'?>\n")
        else:
            self.__write("<?xml version='1.0' encoding='%s'?>\n" % encoding)

    def start(self, tag, attrib={}, **extra):
        """Opens a new element.

        Attributes can be given as keyword arguments or as a dict (the dict
        is copied, never mutated) and are written in sorted order.

        @param tag Element tag.
        @param attrib Attribute dictionary.
        @return An opaque identifier that can be passed to close() to close
            all open elements up to and including this one.
        """
        self.__flush()
        tag = escape_cdata(tag, self.__encoding)
        self.__data = []
        self.__tags.append(tag)
        self.__write("<%s" % tag)
        if attrib or extra:
            attrib = attrib.copy()
            attrib.update(extra)
            # sorted(): Python 3 dict views have no .sort() method.
            for k, v in sorted(attrib.items()):
                k = escape_cdata(k, self.__encoding)
                v = escape_attrib(v, self.__encoding)
                self.__write(" %s=\"%s\"" % (k, v))
        self.__open = 1
        return len(self.__tags) - 1

    def comment(self, comment):
        """Adds a comment to the output stream.

        @param comment Comment text.
        """
        self.__flush()
        self.__write("<!-- %s -->\n" % escape_cdata(comment, self.__encoding))

    def data(self, text):
        """Buffers character data; written out on the next flush."""
        self.__data.append(text)

    def end(self, tag=None):
        """Closes the current element (opened by the most recent start()).

        @param tag Optional element tag; if given, it must match the
            currently open element.
        """
        if tag:
            assert self.__tags, "unbalanced end(%s)" % tag
            assert escape_cdata(tag, self.__encoding) == self.__tags[-1],\
                   "expected end(%s), got %s" % (self.__tags[-1], tag)
        else:
            assert self.__tags, "unbalanced end()"
        tag = self.__tags.pop()
        if self.__data:
            self.__flush()
        elif self.__open:
            # Empty element: emit the self-closing form.
            self.__open = 0
            self.__write(" />")
            return
        self.__write("</%s>" % tag)

    def close(self, id):
        """Closes open elements, up to (and including) the element
        identified by the given identifier (as returned by start())."""
        while len(self.__tags) > id:
            self.end()

    def element(self, tag, text=None, attrib={}, **extra):
        """Adds an entire element: start(), optional data(), end()."""
        # apply() was removed in Python 3; call start() directly.
        self.start(tag, attrib, **extra)
        if text:
            self.data(text)
        self.end()

    def flush(self):
        """Flushes the output stream (replaced in the constructor when the
        underlying file object provides its own flush)."""
        pass
diff --git a/acts/framework/acts/controllers/buds_lib/data_storage/_sponge/__init__.py b/acts/framework/acts/controllers/buds_lib/data_storage/_sponge/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/data_storage/_sponge/__init__.py
diff --git a/acts/framework/acts/controllers/buds_lib/data_storage/_sponge/sponge_client_lite.py b/acts/framework/acts/controllers/buds_lib/data_storage/_sponge/sponge_client_lite.py
new file mode 100644
index 0000000..77b8e35
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/data_storage/_sponge/sponge_client_lite.py
@@ -0,0 +1,1031 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+#
+# Copyright 2009 Google Inc. All Rights Reserved.
+"""Lightweight Sponge client, supporting upload via the HTTP Redirector.
+
+Does not depend on protobufs, Stubby, works on Windows, builds without blaze.
+"""
+
+__author__ = 'klm@google.com (Michael Klepikov)'
+
+import collections
+import os
+import re
+import socket
+import time
+
+try:
+    import httpclient as httplib
+except ImportError:
+    import httplib
+
+try:
+    import StringIO
+except ImportError:
+    from io import StringIO
+
+try:
+    import google3  # pylint: disable=g-import-not-at-top
+    from google3.testing.coverage.util import bitfield  # pylint: disable=g-import-not-at-top
+except ImportError:
+    pass  # Running outside of google3
+
+import SimpleXMLWriter  # pylint: disable=g-import-not-at-top
+
+
class Entity(object):
    """Base class for all Sponge client entities. Provides XML s11n basics."""

    def WriteXmlToStream(self, ostream, encoding='UTF-8'):
        """Writes out all attributes with string/numeric value to the stream.

    Args:
      ostream: A file or file-like object. This object must implement a write
               method.
      encoding: Optionally specify encoding to be used.
    """
        xml_writer = SimpleXMLWriter.XMLWriter(ostream, encoding)
        self.WriteXml(xml_writer)

    def WriteXml(self, xml_writer):
        """Writes out all public attributes that have a serializable value.

    Skips non-public names, public CONSTANTS, methods and attributes listed
    in _custom_write_attributes (those serialize themselves elsewhere).

    Args:
      xml_writer: google3.third_party.python.elementtree.SimpleXMLWriter.
    """
        for attr_name in dir(self):  # Guaranteed sorted alphabetically
            assert attr_name
            if attr_name.startswith(
                    '_') or attr_name[0].upper() == attr_name[0]:
                continue  # Skip non-public attributes and public constants
            if hasattr(self, '_permitted_attributes'):
                assert attr_name in self._permitted_attributes
            if (hasattr(self, '_custom_write_attributes')
                    and attr_name in self._custom_write_attributes):
                # An attribute that has custom serialization code
                continue
            value = self.__getattribute__(attr_name)
            if callable(value):
                continue  # Skip methods
            Entity._WriteValue(xml_writer, attr_name, value)

    def GetXmlString(self):
        """Returns a string with XML produced by WriteXml()."""
        # Import the class directly: the module-level StringIO name may be
        # the py2 module or the py3 class depending on which import in the
        # header try/except succeeded; StringIO.StringIO() breaks on py3.
        from io import StringIO
        xml_out = StringIO()
        self.WriteXmlToStream(xml_out)
        xml_str = xml_out.getvalue()
        xml_out.close()
        return xml_str

    @staticmethod
    def _WriteValue(xml_writer, name, value):
        """Serializes one value, dispatching on its type.

    Strings/numbers become a single element; objects with WriteXml nest;
    sequences repeat the (singularized) element; mappings are written as
    name/value pairs. None is skipped entirely.
    """
        if value is None:
            return  # Do not serialize None (but do serialize 0 or empty string)
        elif isinstance(value, str):
            # Python 3 str is already unicode; the old unicode()/latin
            # re-decode path raised NameError on py3.
            xml_writer.element(name, value)
        elif isinstance(value, bytes):
            # Raw bytes: decode as latin-1 with replacement, mirroring the
            # old py2 non-unicode-str handling.
            xml_writer.element(name,
                               value.decode('latin-1', errors='replace'))
        elif isinstance(value, bool):
            # Careful! Check for this before isinstance(int) -- true for bools
            xml_writer.element(name, str(value).lower())
        elif isinstance(value, (int, float)):
            # py3 int covers the removed py2 long type.
            xml_writer.element(name, str(value))
        elif hasattr(value, 'WriteXml'):
            # An object that knows how to write itself
            xml_writer.start(name)
            value.WriteXml(xml_writer)
            xml_writer.end()
        elif isinstance(value, (list, tuple)):
            # Sequence names are often plural, but the element name must be single
            if name.endswith('s'):
                value_element_name = name[0:len(name) - 1]
            else:
                value_element_name = name
            for sequence_value in value:
                Entity._WriteValue(xml_writer, value_element_name,
                                   sequence_value)
        elif hasattr(value, 'items'):  # A mapping type
            # py3 dicts have items(), not iteritems() -- the old check made
            # this branch unreachable on py3.
            # Map names are often plural, but the element name must be single
            if name.endswith('s'):
                map_element_name = name[0:len(name) - 1]
            else:
                map_element_name = name
            Entity._WriteNameValuesXml(xml_writer, map_element_name, value,
                                       'name', 'value')

    @staticmethod
    def _WriteNameValuesXml(xml_writer, element_name, name_value_dict,
                            name_elem, value_elem):
        """Writes a dict as XML elements with children as keys (names) and values.

    Args:
      xml_writer: google3.third_party.python.elementtree.SimpleXMLWriter.
      element_name: name of enclosing element for the name-value pair elements.
      name_value_dict: the dict to write.
      name_elem: name of the "name" element.
      value_elem: name of the "value" element.
    """
        if name_value_dict:
            for name in sorted(
                    name_value_dict):  # Guarantee order for testability
                value = name_value_dict[name]
                xml_writer.start(element_name)
                Entity._WriteValue(xml_writer, name_elem, name)
                Entity._WriteValue(xml_writer, value_elem, value)
                xml_writer.end()
+
+
class LcovUtils(object):
    """Helpers for turning lcov data into Sponge coverage bit fields."""

    @staticmethod
    def GetFilename(lcov_section):
        """Returns the filename from the leading 'SF:' line of a section."""
        return lcov_section.split('\n', 1)[0].strip()[3:]

    @staticmethod
    def LcovSectionToBitFields(lcov_section):
        """Fill in bit fields that represent covered and instrumented lines.

    Note that lcov line numbers start from 1 while sponge expects line numbers
    to start from 0, hence the line_num-1 is required.

    Args:
      lcov_section: string, relevant section of lcov

    Returns:
      Tuple of google3.testing.coverage.util.bitfield objects. First bitfield
      represents lines covered. Second bitfield represents total lines
      instrumented.
    """
        covered_bf = bitfield.BitField()
        instrumented_bf = bitfield.BitField()
        for line in lcov_section.split('\n'):
            if line.startswith('DA:'):
                line_num, times_hit = line.strip()[3:].split(',')
                instrumented_bf.SetBit(int(line_num) - 1)
                if times_hit != '0':
                    covered_bf.SetBit(int(line_num) - 1)
            elif line.startswith('FN:'):
                pass  # Function coverage will be supported soon.
        return covered_bf, instrumented_bf

    @staticmethod
    def UrlEncode(bit_field):
        """Convert bit field into url-encoded string of hex representation."""
        if not bit_field.CountBitsSet():
            return '%00'
        ret_str = ''
        for c in bit_field.Get():
            # Get() may yield 1-char strings (py2 str) or ints (py3 bytes);
            # handle both rather than calling ord() on an int.
            byte = c if isinstance(c, int) else ord(c)
            ret_str += '%%%02x' % byte
        return ret_str.upper()

    @staticmethod
    def WriteBitfieldXml(xml_writer, name, value):
        """Writes a bitfield as a url-encoded XML element."""
        # UrlEncode already returns a py3 str; the old unicode(...) wrapper
        # raised NameError on Python 3.
        xml_writer.element(name, LcovUtils.UrlEncode(value))
+
+
class FileCoverage(Entity):
    """Represents Sponge FileCoverage.

  instrumented_lines and executed_lines are bit fields: divide a line number
  by 8 to index into the string, mod by 8 for the bit number (0 = LSB,
  7 = MSB).

  Attributes:
    file_name: name of the file this entry represents.
    location: the location of the file: PERFORCE, MONDRIAN, UNKNOWN.
    revision: stores the revision number of the file when location is PERFORCE.
    instrumented_lines: bitfield of line numbers that have been instrumented
    executed_lines: bitfield of line numbers that have been executed
    md5: string. Hex representation of the md5 checksum for the file
         "file_name". This should only be set if file_name is open in the
         client.
    pending_cl: string. CL containing the file "file_name" if it is checked out
                at the time this invocation is sent out. Should only be set if
                location is MONDRIAN.
    sourcerer_depot: string. [optional] The sourcerer depot to use in coverage
        tab. Only required if your code is stored in one of the PerforceN
        servers and therefore has it's own Sourcerer instance. For example,
        Perforce11 code should set sourcerer_depot to "s11".
  """

    # Values for `location`.
    PERFORCE = 0
    MONDRIAN = 1
    UNKNOWN = 2

    # Attributes that Combine() requires to be identical and copies verbatim.
    _COMBINE_COPIED = ('file_name', 'location', 'revision', 'md5',
                       'pending_cl')

    def __init__(self):
        super(FileCoverage, self).__init__()
        self.file_name = None
        self.location = None
        self.revision = None
        self.md5 = None
        self.pending_cl = None
        self.executed_lines = None
        self.instrumented_lines = None
        self.sourcerer_depot = None
        # These two are serialized by our WriteXml override below rather
        # than by the generic Entity attribute walk.
        self._custom_write_attributes = [
            'executed_lines', 'instrumented_lines'
        ]

    def WriteXml(self, xml_writer):
        """Writes this object as XML suitable for Sponge HTTP Redirector.

    Args:
      xml_writer: google3.third_party.python.elementtree.SimpleXMLWriter.
    """
        super(FileCoverage, self).WriteXml(xml_writer)
        for attr_name in self._custom_write_attributes:
            bits = self.__getattribute__(attr_name)
            if bits:
                LcovUtils.WriteBitfieldXml(xml_writer, attr_name, bits)

    def Combine(self, other_file_coverage):
        """Combines 2 FileCoverage objects.

    Both objects must agree on every field except executed_lines and
    instrumented_lines, which are merged with a logical OR. All other fields
    are copied directly from source.

    Args:
      other_file_coverage: FileCoverage object to combine with

    Returns:
      The combined FileCoverage object
    """
        merged = FileCoverage()
        for attr_name in self._COMBINE_COPIED:
            assert (self.__getattribute__(attr_name) ==
                    other_file_coverage.__getattribute__(attr_name))
            setattr(merged, attr_name, self.__getattribute__(attr_name))
        merged.executed_lines = self.executed_lines.Or(
            other_file_coverage.executed_lines)
        merged.instrumented_lines = self.instrumented_lines.Or(
            other_file_coverage.instrumented_lines)
        return merged

    def FromLcovSection(self, lcov_section):
        """Fill in coverage from relevant lcov section.

    An lcov section starts with a line starting with 'SF:' followed by the
    filename of the covered file, then one or more coverage lines starting
    with 'DA:' or 'FN:'.

    'DA:' lines have the format:
      'DA: line_num, times_covered'

    line_num is the line number of source file starting from 1.
    times_covered is the number of times the line was covered, starting from 0.

    'FN:' is for function coverage and is not supported yet.

    An example section would look like this:
      SF:/Volumes/BuildData/PulseData/data/googleclient/picasa4/yt/safe_str.h
      DA:1412,12
      DA:1413,12
      DA:1414,0
      DA:1415,0

    Args:
      lcov_section: string, relevant section of lcov file.
    """
        if not lcov_section:
            return
        assert lcov_section.startswith('SF:')
        self.file_name = LcovUtils.GetFilename(lcov_section)
        self.executed_lines, self.instrumented_lines = (
            LcovUtils.LcovSectionToBitFields(lcov_section))
+
+
class TargetCodeCoverage(Entity):
    """Represents Sponge TargetCodeCoverage.

  Attributes:
    file_coverage: list of FileCoverage objects.
    instrumentation: method of instrumentation: ONTHEFLY, OFFLINE, UNKNOWN
  """

    # instrumentation
    ONTHEFLY = 0
    OFFLINE = 1
    UNKNOWN = 2

    def __init__(self):
        super(TargetCodeCoverage, self).__init__()
        self.file_coverage = []
        self.instrumentation = None

        # Warning: *DO NOT* switch to Python 2.7 OrderedDict. This code needs to
        # run on Windows and other environments where Python 2.7 may not be
        # available.
        self._file_coverage_map = collections.OrderedDict()

    def FromLcovString(self, lcov_str):
        """Fill in coverage from lcov-formatted string.

    Sections describing the same file are merged via FileCoverage.Combine.

    Args:
      lcov_str: contents of lcov file as string
    """
        for entry in lcov_str.split('end_of_record\n'):
            file_coverage = FileCoverage()
            file_coverage.FromLcovSection(entry.strip())

            if not file_coverage.file_name:
                continue

            prev_file_coverage = self._file_coverage_map.get(
                file_coverage.file_name)
            if prev_file_coverage:
                self._file_coverage_map[file_coverage.file_name] = (
                    prev_file_coverage.Combine(file_coverage))
            else:
                self._file_coverage_map[
                    file_coverage.file_name] = file_coverage

        # Materialize explicitly: on Python 3, dict.values() returns a lazy
        # view, while the attribute is documented (and used) as a list.
        # list() is a harmless copy on Python 2.
        self.file_coverage = list(self._file_coverage_map.values())

    def IndexOf(self, filename):
        """Index of filename in the FileCoverage map. Must exist!"""
        # list() is required on Python 3, where dict.keys() views have no
        # .index() method.
        return list(self._file_coverage_map.keys()).index(filename)
+
+
class Sample(Entity):
    """A single measured data point inside a Metric object.

  Attributes:
    value: the data value of this sample -- the thing that we measured.
    timestamp_in_millis: the time when this particular sample was taken.
       Milliseconds since the Epoch. Not required, but highly recommended for
       a proper single-CL view in LoadViz that shows all samples of one run.
    outcome: SUCCESSFUL_OUTCOME or FAILED_OUTCOME.
    metadata: a dict of arbitrary user defined name-value pairs.
      For example, when measuring page load times, one can store the page URL
      under the key "url" in the metadata.
  """

    SUCCESSFUL_OUTCOME = 0
    FAILED_OUTCOME = 1

    def __init__(self):
        super(Sample, self).__init__()
        # Scalar fields all start out unset.
        self.value = self.timestamp_in_millis = self.outcome = None
        self.metadata = {}
+
+
class Percentile(Entity):
    """One percentile bracket within an Aggregation object.

  Percentile objects only give enough info to filter samples by percentiles;
  Sponge doesn't store per-percentile means etc.

  Attributes:
    percentage: upper bracket of the percentile: integer number of percent.
       Lower bracket is always zero.
    value: maximum value for this percentile.
  """

    def __init__(self):
        super(Percentile, self).__init__()
        self.percentage = self.value = None
+
+
class Aggregation(Entity):
    """Aggregated statistics over the samples in a Metric object.

  As also noted in Metric, Sponge would compute a default Aggregation
  if it's not supplied explicitly with a Metric. Sponge currently computes
  the following percentiles: 50, 80, 90, 95, 99, with no way to control it.
  If you want other percentiles, you need to provide the Aggregation
  yourself.

  Attributes:
    count: the number of samples represented by this aggregation.
    min: minimum sample value.
    max: maximum sample value.
    mean: mean of all sample values.
    standard_deviation: standard deviation of all sample values.
    percentiles: a sequence of Percentile objects.
    error_count: the number of samples with error outcomes.
  """

    def __init__(self):
        super(Aggregation, self).__init__()
        # All statistics are unset until explicitly filled in.
        self.count = self.min = self.max = None
        self.mean = self.standard_deviation = self.error_count = None
        self.percentiles = []
+
+
class Metric(Entity):
    """A single named metric under PerformanceData.

  See the comment in PerformanceData about the mapping to sponge.proto.

  Attributes:
    name: the metric name.
    time_series: if True, this is a time series, otherwise not a time series.
    unit: string name of the unit of measure for sample values in this metric.
    machine_name: hostname where the test was run.
        If None, use Invocation.hostname.
    aggregation: an Aggregation object.
        If None, Sponge will compute it from samples.
    samples: a sequence of Sample objects.
  """

    def __init__(self):
        super(Metric, self).__init__()
        self.name = self.unit = None
        self.machine_name = self.aggregation = None
        # Metrics default to time-series form.
        self.time_series = True
        self.samples = []
+
+
class PerformanceData(Entity):
    """Represents Sponge PerformanceData, only moved under a TargetResult.

  Currently sponge.proto defines PerformanceData as a top level object,
  stored in a separate table from Invocations. There is an idea to move it
  under a TargetResult, allowing it to have labels and generally play
  by the same rules as all other test runs -- coverage etc.

  So far the interim solution is to try to have PerformanceData under
  a TargetResult only in sponge_client_lite, and do an on the fly
  conversion to sponge.proto structures in the HTTP Redirector.
  If all goes well there, then a similar conversion in the other direction
  (top level PerformanceData -> PerformanceData under a TargetResult)
  can be implemented in Sponge Java upload code, together with a data model
  change, allowing backward compatibility with older performance test clients.

  The mapping of the PerformanceData fields missing here is as follows:
  id -> Invocation.id
  timestamp_in_millis -> TargetResult.run_date
  cl -> Invocation.cl
  config -> TargetResult.configuration_values
  user -> Invocation.user
  description, project_name, project_id -- not mapped, if necessary should
      be added to Invocation and/or TargetResult, as they are not
      performance-specific. TODO(klm): discuss use cases with havardb@.

  For LoadViz to work properly, Invocation.cl must be supplied even though
  it's formally optional in the Invocation. It doesn't have to be an actual
  Perforce CL number, could be an arbitrary string, but these strings must
  sort in the chronological order -- e.g. may represent a date and time,
  for example may use an ISO date+time string notation of the run_date.

  Attributes:
    benchmark: benchmark name -- the most important ID in LoadViz.
        Must not be None for results to be usable in LoadViz.
    experiment: experiment name.
    thread_count: for load tests, the number of concurrent threads.
    aggregator_strategy: NONE or V1 or V1_NO_DOWNSAMPLE.
    metrics: a sequence of Metric objects.
  """

    NONE = 0
    V1 = 1
    V1_NO_DOWNSAMPLE = 2

    def __init__(self):
        super(PerformanceData, self).__init__()
        self.benchmark = self.experiment = None
        self.thread_count = self.aggregator_strategy = None
        self.metrics = []
+
+
class TestFault(Entity):
    """Details of a single test failure or error.

  Attributes:
    message: message for the failure/error.
    exception_type: the type of failure/error.
    detail: details of the failure/error.
  """

    def __init__(self):
        super(TestFault, self).__init__()

        self._permitted_attributes = {'message', 'exception_type', 'detail'}
        self.message = self.exception_type = self.detail = None
+
+
class TestResult(Entity):
    """A node in the tree of test results: a test suite or a test case.

  Attributes:
    child: List of TestResult representing test suites or test cases
    name: Test result name
    class_name: Required for test cases, otherwise not
    was_run: true/false, default true, optional
    run_duration_millis: -
    property: List of TestProperty entities.
    test_case_count: number of test cases
    failure_count: number of failures
    error_count: number of errors
    disabled_count: number of disabled tests
    test_file_coverage: List of TestCaseFileCoverage
    test_failure: List of TestFault objects describing test failures
    test_error: List of TestFault objects describing test errors
    result: The result of running a test case: COMPLETED, INTERRUPTED, etc
  """

    # result
    COMPLETED = 0
    INTERRUPTED = 1
    CANCELLED = 2
    FILTERED = 3
    SKIPPED = 4
    SUPPRESSED = 5

    # Matches DA lines claiming a nonzero execution count.
    _lcov_executed_re = re.compile(r'^DA:\d+,[1-9][0-9]*', re.MULTILINE)

    def __init__(self):
        super(TestResult, self).__init__()

        self._permitted_attributes = {
            'child', 'name', 'class_name', 'was_run', 'run_duration_millis',
            'property', 'test_case_count', 'failure_count', 'error_count',
            'disabled_count', 'test_file_coverage', 'test_failure',
            'test_error', 'result'
        }
        # Container fields.
        self.child = []
        self.property = []
        self.test_file_coverage = []
        self.test_error = []
        self.test_failure = []
        # Scalar fields.
        self.was_run = True
        self.name = self.class_name = None
        self.run_duration_millis = None
        self.test_case_count = self.failure_count = None
        self.error_count = self.disabled_count = None
        self.result = None

    def FromLcovString(self, lcov_str, target_code_coverage):
        """Fills in hit coverage from an lcov string and target_code_coverage.

    Ignores files with zero hit bitmaps; presumes target_code_coverage is
    final for the purposes of determining the index of filenames.

    Args:
      lcov_str: contents of lcov file as string
      target_code_coverage: TargetCodeCoverage for filename indexing
    """
        for section in lcov_str.split('end_of_record\n'):
            # Skip sections that never executed any line.
            if not TestResult._lcov_executed_re.search(section):
                continue

            case_coverage = TestCaseFileCoverage()
            case_coverage.FromLcovSection(section.strip(),
                                          target_code_coverage)
            self.test_file_coverage.append(case_coverage)
+
+
class TestProperty(Entity):
    """A key/value property attached to a test result.

  Attributes:
    key: A string representing the property key.
    value: A string representing the property value.
  """

    def __init__(self):
        super(TestProperty, self).__init__()
        self._permitted_attributes = {'key', 'value'}
        self.key = self.value = None
+
+
class TestCaseFileCoverage(Entity):
    """Per-test-case file coverage data.

  Attributes:
    file_coverage_index: index into associated test target's file coverage.
    executed_lines: bitfield representing executed lines, as for FileCoverage.
    zipped_executed_lines: zip of executed_lines data, if smaller.
  """

    def __init__(self):
        super(TestCaseFileCoverage, self).__init__()

        self._permitted_attributes = {
            'file_coverage_index', 'executed_lines', 'zipped_executed_lines'
        }

        self.file_coverage_index = None
        self.executed_lines = 0
        self.zipped_executed_lines = 0
        self._custom_write_attributes = [
            'executed_lines', 'zipped_executed_lines'
        ]

    def WriteXml(self, xml_writer):
        """Writes this object as XML suitable for Sponge HTTP Redirector.

    Args:
      xml_writer: google3.third_party.python.elementtree.SimpleXMLWriter.
    """
        super(TestCaseFileCoverage, self).WriteXml(xml_writer)
        # TODO(weasel): Mmmaybe lift bitfield handling to the base class.
        for name in self._custom_write_attributes:
            bitfield = getattr(self, name)
            if bitfield:
                LcovUtils.WriteBitfieldXml(xml_writer, name, bitfield)

    def FromLcovSection(self, lcov_section, tcc):
        """Fills in per-case coverage from one lcov section.

    Args:
      lcov_section: string, relevant section of lcov file.
      tcc: TargetCodeCoverage used to resolve the covered file's index.
    """
        if not lcov_section:
            return
        assert lcov_section.startswith('SF:')

        self.file_coverage_index = tcc.IndexOf(
            LcovUtils.GetFilename(lcov_section))
        self.executed_lines, _unused_instrumented = (
            LcovUtils.LcovSectionToBitFields(lcov_section))
        # TODO(weasel): compress executed_lines to zipped_* if smaller.
+
+
class GoogleFilePointer(Entity):
    """A pointer to a file on the Google File system.

  Attributes:
    name: str name for use by Sponge
    path: str containing the target Google File.
    length: integer size of the file; used purely for display purposes.
  """

    def __init__(self, name, path, length):
        super(GoogleFilePointer, self).__init__()
        self.name = name
        self.path = path
        self.length = length

    def WriteXml(self, xml_writer):
        """Writes this object as XML suitable for Sponge HTTP Redirector.

    Args:
      xml_writer: google3.third_party.python.elementtree.SimpleXMLWriter.
    """
        Entity._WriteValue(xml_writer, 'name', self.name)
        xml_writer.start('google_file_pointer')
        # 'path' then 'length' -- order matters to the redirector's parser.
        for field in ('path', 'length'):
            Entity._WriteValue(xml_writer, field, getattr(self, field))
        xml_writer.end()
+
+
class TargetResult(Entity):
    """Represents Sponge TargetResult.

  Attributes:
    index: index of the target result within its parent Invocation.
        Needed only for update requests, not for initial creation.
    run_date: execution start timestamp in milliseconds.
    build_target: the name of the build target that was executed.
    size: one of size constants: SMALL, MEDIUM, LARGE, OTHER_SIZE, ENORMOUS.
    environment: how we ran: FORGE, LOCAL_*, OTHER_*, UNKNOWN_*.
    status: test outcome: PASSED, FAILED, etc.
    test_result: tree of TestResults representing test suites and test cases.
    language: programming language of the source code: CC, JAVA, etc.
    run_duration_millis: execution duration in milliseconds.
    status_details: a string explaining the status in more detail.
    attempt_number: for flaky reruns, the number of the run attempt. Start at 1.
    total_attempts: for flaky reruns, the total number of run attempts.
    coverage: a TargetCodeCoverage object.
    performance_data: a PerformanceData object.
    configuration_values: a dict of test configuration parameters.
    type: the type of target: TEST, BINARY, LIBRARY, APPLICATION.
    large_texts: a dict of logs associated with this run. A magic key 'XML Log'
      allows to upload GUnit/JUnit XML and auto-convert it to TestResults.
    large_text_pointers: a list of GoogleFilePointers - distinction for
      formatting only, these are conceptually the same as large_texts.
  """

    # size - if you update these values ensure to also update the appropriate
    # enum list in uploader_recommended_options.py
    SMALL = 0
    MEDIUM = 1
    LARGE = 2
    OTHER_SIZE = 3
    ENORMOUS = 4

    # environment
    FORGE = 0
    LOCAL_PARALLEL = 1
    LOCAL_SEQUENTIAL = 2
    OTHER_ENVIRONMENT = 3
    UNKNOWN_ENVIRONMENT = 4

    # status - if you update these values ensure to also update the appropriate
    # enum list in uploader_optional_options.py
    PASSED = 0
    FAILED = 1
    CANCELLED_BY_USER = 2
    ABORTED_BY_TOOL = 3
    FAILED_TO_BUILD = 4
    BUILT = 5
    PENDING = 6
    UNKNOWN_STATUS = 7
    INTERNAL_ERROR = 8

    # language - if you update these values ensure to also update the appropriate
    # enum list in uploader_recommended_options.py
    UNSPECIFIED_LANGUAGE = 0
    BORGCFG = 1
    CC = 2
    GWT = 3
    HASKELL = 4
    JAVA = 5
    JS = 6
    PY = 7
    SH = 8
    SZL = 9

    # type
    UNSPECIFIED_TYPE = 0
    TEST = 1
    BINARY = 2
    LIBRARY = 3
    APPLICATION = 4

    def __init__(self):
        super(TargetResult, self).__init__()
        self.index = None
        # int() instead of the Python-2-only long(): Python 3 removed long,
        # and Python 2 ints auto-promote, so behavior is unchanged there.
        self.run_date = int(round(time.time() * 1000))
        self.build_target = None
        self.size = None
        self.environment = None
        self.status = None
        self.test_result = None
        self.language = None
        self.run_duration_millis = None
        self.status_details = None
        self.attempt_number = None
        self.total_attempts = None
        self.coverage = None
        self.performance_data = None
        self.configuration_values = {}
        self.type = None
        self.large_texts = {}
        self.large_text_pointers = []
        self._custom_write_attributes = ['large_text_pointers']

    def MarkRunDuration(self):
        """Assigns run_duration_millis to the current time minus run_date."""
        assert self.run_date
        self.run_duration_millis = int(round(
            time.time() * 1000)) - self.run_date
        # >= 0, not > 0: a target finishing within the same millisecond as it
        # started has a legitimate duration of 0 ms.
        assert self.run_duration_millis >= 0

    def WriteXml(self, xml_writer):
        """Writes this object as XML suitable for Sponge HTTP Redirector.

    Args:
      xml_writer: google3.third_party.python.elementtree.SimpleXMLWriter.
    """
        super(TargetResult, self).WriteXml(xml_writer)
        # Write out GoogleFilePointers as large_text fields
        for google_file_pointer in self.large_text_pointers:
            Entity._WriteValue(xml_writer, 'large_text', google_file_pointer)
+
+
class Invocation(Entity):
    """Represents a Sponge Invocation.

  Attributes:
    id: the ID of an invocation to update.
        Needed only for update requests, not for initial creation.
    run_date: execution start timestamp in milliseconds
    user: username.
    client: P4 client name.
    cl: P4 changelist ID.
    hostname: the host where the tests ran.
    working_dir: the dir where the tests ran.
    args: command line arguments of the test command.
    environment_variables: a dict of notable OS environment variables.
    configuration_values: a dict of test configuration parameters.
    large_texts: a dict of logs associated with the entire set of target runs.
    labels: a list of labels associated with this invocation.
    target_results: a list of TargetResult objects.
    large_text_pointers: a list of GoogleFilePointers - distinction for
      formatting only, these are conceptually the same as large_texts.
  """

    def __init__(self):
        super(Invocation, self).__init__()
        self.id = None
        # int() instead of the Python-2-only long(): Python 3 removed long,
        # and Python 2 ints auto-promote, so behavior is unchanged there.
        self.run_date = int(round(time.time() * 1000))
        self.user = None
        self.target_results = []
        self.client = None
        self.cl = None
        self.hostname = socket.gethostname().lower()
        self.working_dir = os.path.abspath(os.curdir)
        self.args = None
        self.environment_variables = {}
        self.configuration_values = {}
        self.large_texts = {}
        self.large_text_pointers = []
        self.labels = []
        self._custom_write_attributes = [
            'environment_variables',
            'large_text_pointers',
        ]

    def WriteXml(self, xml_writer):
        """Writes this object as XML suitable for Sponge HTTP Redirector.

    Args:
      xml_writer: google3.third_party.python.elementtree.SimpleXMLWriter.
    """
        super(Invocation, self).WriteXml(xml_writer)
        Entity._WriteNameValuesXml(
            xml_writer,
            'environment_variable',
            self.environment_variables,
            name_elem='variable',
            value_elem='value')
        # Write out GoogleFilePointers as large_text fields
        for google_file_pointer in self.large_text_pointers:
            Entity._WriteValue(xml_writer, 'large_text', google_file_pointer)
+
+
# Constants for Uploader.server: names of the Sponge backends that the
# HTTP Redirector accepts (see also GetInvocationUrl for the matching
# browse frontends).
SERVER_PROD = 'backend'
SERVER_QA = 'backend-qa'
+
+
class Uploader(Entity):
    """Uploads Sponge Invocations to the Sponge HTTP Redirector service.

    Collects Invocation objects in self.invocations, serializes them all to
    one XML document, and PUTs it to the redirector, optionally through a
    plain HTTP proxy.

    NOTE(review): this class uses the Python 2 `httplib` module; on Python 3
    it would need `http.client` -- confirm the intended runtime.
    """

    def __init__(self,
                 url_host='sponge-http.appspot.com',
                 upload_url_path='/create_invocation',
                 update_url_path='/update_target_result',
                 server=None):
        """Initializes the object.

    Args:
      url_host: host or host:port for the Sponge HTTP Redirector server.
      upload_url_path: the path after url_host, used to create invocations.
      update_url_path: the path after url_host, used to update target results.
      server: name of the Sponge backend, if None use SERVER_QA.
    """
        super(Uploader, self).__init__()
        self.server = server or SERVER_QA
        self.invocations = []
        self._url_host = url_host
        self._upload_url_path = upload_url_path
        self._update_url_path = update_url_path
        # No proxy by default; set one via UseProxy().
        self._proxy = None
        # Overridable via UseHTTPSConnectionFactory() for client-side SSL.
        self._https_connection_factory = httplib.HTTPSConnection

    def WriteXml(self, xml_writer):
        """Writes this object as XML suitable for Sponge HTTP Redirector.

    Wraps all serialized fields (including self.invocations) in a root
    <xml> element.

    Args:
      xml_writer: google3.third_party.python.elementtree.SimpleXMLWriter.
    """
        xml_writer.start('xml')
        super(Uploader, self).WriteXml(xml_writer)
        xml_writer.end()

    def UseProxy(self, proxy):
        """Forward requests through a given HTTP proxy.

    Args:
      proxy: the proxy address as '<host>' or '<host>:<port>'
    """
        self._proxy = proxy

    def UseHTTPSConnectionFactory(self, https_connection_factory):
        """Use the given function to create HTTPS connections.

    This is helpful for clients on later version of Python (2.7.9+) that wish to
    do client-side SSL authentication via ssl.SSLContext.

    Args:
      https_connection_factory: A function that takes a string url and returns
                                an httplib.HTTPSConnection.
    """
        self._https_connection_factory = https_connection_factory

    def Upload(self):
        """Uploads Sponge invocations to the Sponge HTTP Redirector service.

    Returns:
      A string with Sponge invocation IDs, as returned by the HTTP Redirector.

    Raises:
      ValueError: when at least one invocation id is not None.
    """
        # New invocations must not carry an id; the server assigns one.
        for invocation in self.invocations:
            if invocation.id:
                raise ValueError(
                    'Invocation id must be None for new invocation.')
        return self._UploadHelper(self._url_host, self._upload_url_path)

    def UploadUpdatedResults(self):
        """Uploads updated Sponge invocations to the Sponge HTTP Redirector service.

    Returns:
      A string with Sponge invocation IDs, as returned by the HTTP Redirector.

    Raises:
      ValueError: when at least one invocation id is None or at least one
        target result has index of None.
    """
        # Updates must reference an existing invocation and target slot.
        for invocation in self.invocations:
            if invocation.id is None:
                raise ValueError('Invocation id must not be None for update.')
            for target_result in invocation.target_results:
                if target_result.index is None:
                    raise ValueError(
                        'Target result index can not be None for update.')
        return self._UploadHelper(self._url_host, self._update_url_path)

    def _UploadHelper(self, host, url):
        """A helper function to perform actual upload of Sponge invocations.

    Serializes self to XML and PUTs it to the given endpoint, directly over
    HTTPS or through the configured HTTP proxy.

    NOTE(review): the connection is never closed after the response is read;
    consider http_connect.close() in a finally block.

    Args:
      host: host server to connect to.
      url: url for Sponge end point.

    Returns:
      A string represent Sponge invocation IDs.
    """
        if self._proxy:
            # A simple HTTP proxy request is the same as a regular HTTP request
            # via the proxy host:port, except the path after the method (GET or POST)
            # is the full actual request URL.
            url = 'https://%s%s' % (host, url)
            # Assume proxy does not support HTTPS.
            http_connect = httplib.HTTPConnection(self._proxy)
        else:
            http_connect = self._https_connection_factory(host)
        xml_str = self.GetXmlString()
        http_connect.connect()
        http_connect.request('PUT', url, body=xml_str)
        response = http_connect.getresponse()
        response_str = response.read().strip()
        # Unwrap the textproto-style 'id: "..."' envelope down to the bare ID.
        if response_str.startswith('id: "'):
            response_str = response_str[5:-1]
        return response_str
+
+
def GetInvocationUrl(server, invocation_id):
    """Returns the browsable frontend URL for a Sponge invocation.

    Args:
      server: Sponge backend name; 'backend-qa' maps to the QA frontend,
        anything else to the production frontend.
      invocation_id: the Sponge invocation ID string.

    Returns:
      URL string pointing at the invocation.
    """
    frontend = 'sponge-qa' if server == 'backend-qa' else 'tests'
    return 'http://%s/%s' % (frontend, invocation_id)
diff --git a/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/__init__.py b/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/__init__.py
diff --git a/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/bigquery_buffer.py b/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/bigquery_buffer.py
new file mode 100644
index 0000000..6c8c56b
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/bigquery_buffer.py
@@ -0,0 +1,155 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+"""Simple buffer interface that sends rows to specified tables in wearables-qa project in BigQuery."""
+import acts.controllers.buds_lib.data_storage.bigquery.bigquery_logger_utils as bq_utils
+import os
+import time
+import yaml
+
+CONFIG = 'config.yml'
+PATH_TO_CONFIG = os.path.join(os.path.dirname(__file__), CONFIG)
+
+queue = None
+
+
class BigqueryBufferError(Exception):
    """Raised when the data storage queue malfunctions or is unreachable."""
+
+
class BigQueryProcessManager:
    """Manages the shared queue server and automatic logger subprocesses.

    Settings (server address, queue size, credentials, flush period) come
    from a YAML config file and can be reloaded at any time via load_config.
    """

    def __init__(self, config_path):
        self.config_path = config_path
        # Unset until the first load_config() completes.
        self.ip_address = None
        self.port = None
        self.load_config()

    def load_config(self):
        """(Re)reads settings from the YAML file at self.config_path.

        Raises:
          BigqueryBufferError: if queue_size or authkey changed while the
            ip_address/port stayed the same; those settings can only change
            together with a fresh server on an unused port.
        """
        # safe_load refuses arbitrary-object YAML tags (plain yaml.load can
        # construct arbitrary Python objects); the context manager closes the
        # file handle that the previous bare open() leaked.
        with open(self.config_path, 'r') as config_file:
            config = yaml.safe_load(config_file)
        new_ip_address = config['ip_address']
        new_port = config['port']
        new_queue_size = config['queue_size']
        new_authkey = config['authkey']
        if new_ip_address == self.ip_address and new_port == self.port:
            if new_authkey != self.authkey or new_queue_size != self.queue_size:
                raise BigqueryBufferError(
                    'To change queue size or server authkey, choose an unused port for a new server.'
                )
        self.project_id = config['project_id']
        self.credentials_path = config['credentials_path']
        self.queue_size = config['queue_size']
        self.ip_address = config['ip_address']
        self.port = config['port']
        self.authkey = config['authkey']
        self.flush_period = config['flush_period']

    def start_subprocesses(self):
        """Ensures the queue server and the automatic logger are running.

        Returns:
          True if both subprocesses are up afterwards, False otherwise.
        """
        old_server_pid, old_queue = None, None

        if not self.server_pid():
            try:
                # Check if a BigqueryLoggerQueue currently exists but with
                # different args.
                old_server_pid, old_queue = bq_utils.get_current_queue_and_server_pid(
                )
            except TypeError:
                # No pre-existing server/queue to inherit from.
                pass

            # Start server to initialize new shared BigqueryLoggerQueue.
            bq_utils.start_queue_server(
                queue_size=self.queue_size,
                ip_address=self.ip_address,
                port=self.port,
                authkey=self.authkey)
            # Give the server a moment to come up before connecting to it.
            time.sleep(5)

        # Retrieve proxy object for new shared BigqueryLoggerQueue.
        global queue
        queue = bq_utils.get_queue(
            ip_address=self.ip_address, port=self.port, authkey=self.authkey)

        if queue:
            # If an older queue exists, drain its items into the new one
            # before killing the old server.
            if old_queue and old_server_pid:
                while not old_queue.empty():
                    queue.put(old_queue.get())
                bq_utils.kill_pid(old_server_pid)

            # noinspection PyUnresolvedReferences
            queue.set_flush_period(self.flush_period)

            # noinspection PyUnresolvedReferences
            if not self.automatic_logger_pid():
                bq_utils.kill_current_scheduled_automatic_logger()

                bq_utils.start_scheduled_automatic_logger(
                    ip_address=self.ip_address,
                    port=self.port,
                    authkey=self.authkey,
                    project_id=self.project_id,
                    credentials_path=self.credentials_path)

        if self.server_pid() and self.automatic_logger_pid():
            return True

        return False

    def automatic_logger_pid(self):
        """Returns the scheduled automatic logger's PID, if one is running."""
        return bq_utils.get_scheduled_automatic_logger_pid(
            ip_address=self.ip_address,
            port=self.port,
            authkey=self.authkey,
            project_id=self.project_id,
            credentials_path=self.credentials_path)

    def server_pid(self):
        """Returns the queue server PID matching the current settings."""
        return bq_utils.get_logger_server_pid(
            queue_size=self.queue_size,
            ip_address=self.ip_address,
            port=self.port,
            authkey=self.authkey)
+
+
+process_manager = BigQueryProcessManager(PATH_TO_CONFIG)
+
+
def log(dataset_id, table_id, row_dict):
    """Sends a row dict to be flushed to a table in BigQuery.

  Arguments:
    dataset_id: dataset in which table resides.
    table_id: table to update with row.
    row_dict: dictionary of field: value pairs to send to table.

  Raises:
    BigqueryBufferError: if the subprocesses could not be started or the row
      could not be pushed onto the storage queue.
  """
    global queue

    try:
        process_manager.load_config()
    except BigqueryBufferError as e:
        # Fix: exceptions have no .message attribute on Python 3 (print(
        # e.message) raised AttributeError); printing the exception itself is
        # equivalent. The conflict is non-fatal: the already-running
        # subprocesses simply keep their old settings.
        print(e)
        subprocesses_started = True
    else:
        subprocesses_started = process_manager.start_subprocesses()

    if not subprocesses_started:
        raise BigqueryBufferError('Could not start subprocesses')
    if queue:
        try:
            # noinspection PyUnresolvedReferences
            queue.add_row(dataset_id, table_id, row_dict)
        except EOFError:
            # The queue server went away mid-push.
            raise BigqueryBufferError(
                'Could not push data to storage queue (EOFError)')
    else:
        raise BigqueryBufferError('No data queue exists to push data to...')
diff --git a/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/bigquery_logger.py b/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/bigquery_logger.py
new file mode 100644
index 0000000..74f5e7d
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/bigquery_logger.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+"""Client object for testing infrastructure to store information in BigQuery"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from acts.controllers.buds_lib.data_storage.bigquery.bigquery_logger_utils import add_row, BigqueryLoggerClient
+
+# BigQuery project and service-account key used by BigqueryLogger below.
+PROJECT_ID = 'google.com:wearables-qa'
+CREDENTIALS_PATH = '/google/data/ro/teams/wearables/test/automation/bigquery/wearables-service-key.json'
+
+
+class BigqueryLogger:
+    """Bigquery Logger specialized for automated test logging."""
+
+    def __init__(self, dataset_id, table_id):
+        """Initialization method for BigqueryLogger class."""
+        # An array of InsertEntry objects to insert into the BigQuery table
+        self.rows = []
+        self.dataset_id = dataset_id
+        self.table_id = table_id
+        self.utils = BigqueryLoggerClient(
+            project_id=PROJECT_ID,
+            google_application_credentials_path=CREDENTIALS_PATH)
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        self.utils.flush(self.rows, self.dataset_id, self.table_id)
+
+    def clear(self):
+        """Clear data structures"""
+        self.rows = []
+
+    def get_rows(self):
+        """Getter method for self.rows()."""
+        return self.rows
+
+    def add_row(self, row_dict):
+        print('Adding row...')
+        return add_row(row_dict, self.rows)
diff --git a/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/bigquery_logger_queue.py b/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/bigquery_logger_queue.py
new file mode 100644
index 0000000..7ebf90f
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/bigquery_logger_queue.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+"""Queue wrapper object to be shared across all tests using the bigquery_buffer module."""
+
+from multiprocessing import Queue
+
+# Default maximum number of pending row inserts held by the shared queue.
+DEFAULT_SIZE = 30000
+
+
+class BigqueryLoggerQueue:
+    """Organizes and stores all BigQuery table row updates sent to it."""
+
+    def __init__(self, size=DEFAULT_SIZE):
+        self.queue = Queue(maxsize=size)
+        self.flush_period = 1
+
+    def add_row(self, dataset_id, table_id, row):
+        """Store row to be added with all other rows to be added to passed table.
+
+    Arguments:
+      dataset_id: the dataset in which table_id resides.
+      table_id: the id of the table to update.
+      row: a dictionary of field: value pairs representing the row to add.
+    """
+
+        self.queue.put(((dataset_id, table_id), row))
+
+    def get_insert_iterator(self):
+        """Organize queue into iterator of ((dataset_id, table_id), rows_list) tuples.
+    Takes state of queue upon invocation, ignoring items put in queue after.
+
+    Returns:
+      insert_iterator: an iterator of pairs dataset/table ids and the lists
+      of rows to insert into those tables.
+    """
+
+        insert_dict = {}
+        num_entries_to_insert = self.queue.qsize()
+
+        for i in xrange(num_entries_to_insert):
+            if not self.queue.empty():
+                dataset_table_tuple, row_dict = self.queue.get()
+                if dataset_table_tuple not in insert_dict.keys():
+                    insert_dict[dataset_table_tuple] = []
+                insert_dict[dataset_table_tuple].append(row_dict)
+
+        return insert_dict.items()
+
+    def put(self, row_tuple):
+        self.queue.put(row_tuple)
+
+    def get(self):
+        return self.queue.get()
+
+    def empty(self):
+        return self.queue.empty()
+
+    def get_flush_period(self):
+        return self.flush_period
+
+    def set_flush_period(self, period):
+        self.flush_period = int(period)
diff --git a/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/bigquery_logger_server.py b/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/bigquery_logger_server.py
new file mode 100644
index 0000000..39a2307
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/bigquery_logger_server.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+"""Script to start running server that manages a shared BigqueryLoggerQueue."""
+
+import sys
+from multiprocessing.managers import BaseManager
+
+from acts.controllers.buds_lib.data_storage.bigquery.bigquery_logger_queue import BigqueryLoggerQueue
+
+
+def start_queue_server(queue_size, ip_address, port, authkey):
+    queue = BigqueryLoggerQueue(size=int(queue_size))
+    BaseManager.register('get_queue', callable=lambda: queue)
+    m = BaseManager(address=(ip_address, int(port)), authkey=authkey)
+    s = m.get_server()
+
+    print('starting server...')
+    s.serve_forever()
+
+
+def main():
+    # Expects exactly four CLI args: queue_size, ip_address, port, authkey;
+    # unpacking raises ValueError if the count is wrong.
+    queue_size, ip_address, port, authkey = sys.argv[1:]
+    start_queue_server(queue_size, ip_address, port, authkey)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/bigquery_logger_utils.py b/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/bigquery_logger_utils.py
new file mode 100644
index 0000000..e6e7277
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/bigquery_logger_utils.py
@@ -0,0 +1,704 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+import logging
+from datetime import datetime
+import inspect
+import os
+import socket
+import string
+import subprocess
+import time
+import yaml
+from multiprocessing.managers import BaseManager
+
+from google.api_core.exceptions import NotFound
+from google.cloud import bigquery
+
+# Canonical string form for datetimes stored as BigQuery TIMESTAMP fields.
+_TIMESTAMP_STR_FORMAT = '%Y-%m-%d %H:%M:%S'
+# Script names used both to spawn the helper subprocesses and to find them
+# again via pgrep.
+_AUTOMATIC_LOGGER_SCRIPT = 'bigquery_scheduled_automatic_client.py'
+_SERVER_SCRIPT = 'bigquery_logger_server.py'
+
+
+def load_config(config_file_path):
+    with open(config_file_path, 'r') as f:
+        config = yaml.load(f)
+    return config
+
+
+class BigQueryLoggerUtilsError(Exception):
+    """Exception raised for errors in the bigquery logger utils module."""
+
+
+#################################
+# Data transformation and preparation methods
+#################################
+
+
+def make_storeable(value):
+    """Casts non primitive data types to string.
+
+  Certain data types such as list can cause unexpected behavior with BigQuery.
+
+  Arguments:
+    value: an object to store in a BigQuery table.
+  Returns:
+    value or str(value): string version of passed value, if necessary.
+  """
+    if (isinstance(value, int) or isinstance(value, float)
+            or isinstance(value, str) or isinstance(value, bool)):
+        return value
+    elif isinstance(value, datetime):
+        return value.strftime(_TIMESTAMP_STR_FORMAT)
+    return str(value)
+
+
+def get_field_name(dirty_string):
+    """Converts field name to a BigQuery acceptable field name.
+
+  Arguments:
+    dirty_string: the string to convert to a standardized field name.
+  Returns:
+    field_name: the field name as a string.
+  """
+    valid_chars = '_ %s%s' % (string.ascii_letters, string.digits)
+    field_name = ''.join(c for c in dirty_string.upper() if c in valid_chars)
+    field_name = field_name.strip().replace(' ', '_')
+    if not field_name:
+        field_name = 'FIELD'
+    elif field_name[0] not in string.ascii_letters + '_':
+        field_name = 'FIELD_' + field_name
+    return field_name
+
+
+def get_bigquery_type(value):
+    """Returns BigQuery recognizable datatype string from value.
+
+  Arguments:
+    value: the item you want to store in BigQuery
+  Returns:
+    field_type: the BigQuery data type for the field to store your value.
+  """
+    # Dict for converting Python types to BigQuery recognizable schema fields
+    field_name = {
+        'STR': 'STRING',
+        'INT': 'INTEGER',
+        'FLOAT': 'FLOAT',
+        'BOOL': 'BOOLEAN'
+    }
+
+    # Default field type is STRING
+    field_type = 'STRING'
+    if isinstance(value, str):
+        try:
+            # Try to infer whether datatype is a timestamp by converting it to
+            # a timestamp object using the string format
+            time.strptime(value, _TIMESTAMP_STR_FORMAT)
+            field_type = 'TIMESTAMP'
+        except ValueError:
+            pass
+    else:
+        type_string = type(value).__name__
+        try:
+            field_type = field_name[type_string.upper()]
+        except KeyError:
+            logging.error('Datatype %s not recognized. Reverting to STRING.',
+                          type_string)
+    return field_type
+
+
+def add_row(dictionary, row_list_to_update):
+    # Convert dictionary key names to BigQuery field names
+    to_add = {
+        get_field_name(key): make_storeable(value)
+        for key, value in dictionary.items()
+    }
+
+    row_list_to_update.append(to_add)
+
+
+def change_field_name(old_name, new_name, row_list_to_update):
+    """Changes field name in row_list_to_update in place.
+
+  Arguments:
+    old_name: the old field name, to be replaced.
+    new_name: the new name to replace the old one.
+    row_list_to_update: the list of row dictionaries to update the field name for
+  Returns:
+    num_replacements: how many rows were affected by this change.
+  """
+    old_name = get_field_name(old_name)
+    new_name = get_field_name(new_name)
+    num_replacements = 0
+    for row in row_list_to_update:
+        if old_name in row.keys():
+            # Update all items in the rows with the new field name
+            row[new_name] = row[old_name]
+            del row[old_name]
+            num_replacements += 1
+    return num_replacements
+
+
+def get_tuple_from_schema(schema):
+    """Returns a tuple of all field names in the passed schema"""
+    return tuple(field.name for field in schema)
+
+
+def get_dict_from_schema(schema):
+    """Turns a BigQuery schema array into a more flexible dictionary.
+
+  Arguments:
+    schema: the schema array to be converted.
+  Returns:
+    dictionary: a dictionary from the schema. Maps field names to field types.
+  """
+    dictionary = {
+        schema_field.name: schema_field.field_type
+        for schema_field in schema
+    }
+    return dictionary
+
+
+def reconcile_schema_differences(schema_to_change_dict,
+                                 schema_to_preserve_dict):
+    """Returns a schema dict combining two schema dicts.
+
+  If there are conflicts between the schemas, for example if they share a
+  field name but those field names don't share the same type value, that field
+  name in one of the schema dicts will have to change to be added to the
+  combined schema.
+  Arguments:
+    schema_to_change_dict: a dict representing the schema that will be changed
+    if a conflict arises.
+    schema_to_preserve_dict: a dict representing the schema whose fields will
+    remain unchanged.
+  Returns:
+    new_schema_dict: a dict representing the combined schemas
+    changed_fields: a dict mapping old field names to their new field names,
+    if they were changed, in schema_to_change_dict.
+  """
+    new_schema_dict = schema_to_preserve_dict.copy()
+    changed_fields = {}
+    for field_name, field_type in schema_to_change_dict.items():
+        if field_name in schema_to_preserve_dict.keys():
+
+            # Field name already exists in remote table, but it might not accept the
+            # same value type the user is passing this time around
+            if schema_to_preserve_dict[field_name] == field_type:
+
+                # Same data type for fields, no need to do anything
+                continue
+            else:
+
+                # We need to create a new field with a unique name to store this
+                # different data type. Automatically makes new name:
+                # FIELD_NAME_FIELD_TYPE, ex. 'RESULT_BOOLEAN'
+                new_field_name = '%s_%s' % (field_name, field_type)
+
+                # On the off chance that this new field name is also already taken, we
+                # start appending numbers to it to make it unique. This should be an
+                # extreme edge case, hence the inelegance.
+                # NOTE(review): new_field_name[:-1] strips only one character,
+                # so once count reaches 10 the generated names grow oddly
+                # (e.g. 'X10' -> 'X111'); acceptable only because >9
+                # collisions is considered unreachable — verify if that
+                # assumption changes.
+                count = 1
+                merged_schemas = schema_to_preserve_dict.copy()
+                merged_schemas.update(schema_to_change_dict)
+                if new_field_name in merged_schemas.keys(
+                ) and merged_schemas[new_field_name] != field_type:
+                    new_field_name += str(count)
+                while new_field_name in merged_schemas.keys(
+                ) and merged_schemas[new_field_name] != field_type:
+                    count += 1
+                    new_field_name = new_field_name[:-1] + str(count)
+
+                # Update the actual rows in our logger as well as self.schema_dict to
+                # reflect the new field name.
+                changed_fields[field_name] = new_field_name
+
+                new_schema_dict[new_field_name] = field_type
+
+        else:
+            # No conflict: carry the field over unchanged.
+            new_schema_dict[field_name] = field_type
+
+    return new_schema_dict, changed_fields
+
+
+#################################
+# BigQuery request data preparation methods
+#################################
+
+
+def get_schema_from_dict(dictionary):
+    """Turns dictionary into a schema formatted for BigQuery requests.
+
+  Arguments:
+    dictionary: the dictionary to convert into a schema array.
+  Returns:
+    schema: an array of SchemaField objects specifying name and type, listed alphabetically.
+  """
+    schema = []
+    for key in sorted(dictionary):
+        schema.append(
+            bigquery.SchemaField(key, dictionary[key], mode='nullable'))
+    return schema
+
+
+def get_schema_from_rows_list(rows_list):
+    """Deduces the BigQuery table schema represented by a list of row dictionaries.
+
+  Note: rows in rows_list may be mutated in place — when two rows disagree
+  on a field's type, the conflicting value is converted to str so the field
+  can be stored as STRING.
+
+  Arguments:
+    rows_list: the list of row dictionaries to create a schema from.
+  Returns:
+    schema: a formatted BigQuery table schema with the fields in alphabetical order."""
+    schema = {}
+    for row in rows_list:
+        # Create new field names and corresponding types in self.schema_dict in case
+        # the schema of the remote table needs to be updated.
+        for key, value in row.items():
+            value_type = get_bigquery_type(value)
+            if key in schema.keys():
+                # We have another row with the same field name. Most of the time their
+                # types should match and we can just skip adding it to the fields to
+                # update
+
+                if value_type != schema[key]:
+                    # Their types don't match. Merge the fields and change the type to
+                    # string
+                    schema[key] = 'STRING'
+
+                    # In-place mutation: the caller's row now holds the
+                    # stringified value.
+                    row[key] = str(row[key])
+            else:
+                schema[key] = value_type
+
+    return get_schema_from_dict(schema)
+
+
+def get_formatted_rows(rows_list, schema):
+    """Returns an InsertEntry object for adding to BQ insert request.
+
+  Arguments:
+    rows_list: a list of row dictionaries to turn into tuples of values corresponding to the schema fields.
+    schema: a tuple representing the column names in the table.
+  Returns:
+    rows: an array of tuples with the elements ordered corresponding to the order of the column names in schema.
+  """
+    rows = []
+    schema_tuple = get_tuple_from_schema(schema)
+    for row in rows_list:
+        row_tuple = tuple(
+            row[key] if key in row.keys() else None for key in schema_tuple)
+        rows.append(row_tuple)
+    return rows
+
+
+#################################
+#  BigQuery client class
+#################################
+
+
+class BigqueryLoggerClient:
+    """Client class for interacting with and preparing data for BigQuery"""
+
+    def __init__(self, project_id, google_application_credentials_path):
+        # Side effect: sets GOOGLE_APPLICATION_CREDENTIALS for the whole
+        # process, not just this client instance.
+        os.environ[
+            'GOOGLE_APPLICATION_CREDENTIALS'] = google_application_credentials_path
+        self.client = bigquery.Client(project_id)
+
+    #################################
+    # BigQuery request methods
+    #################################
+
+    def create_dataset(self, dataset_id):
+        """Creates a new dataset if it doesn't exist.
+
+    Arguments:
+      dataset_id: the name of the dataset you want to create.
+    Returns:
+      dataset: the resulting dataset object.
+    """
+        dataset_ref = self.client.dataset(dataset_id)
+        dataset = bigquery.Dataset(dataset_ref)
+        try:
+            dataset = self.client.get_dataset(dataset_ref)
+        # NOTE(review): broad except — any get_dataset failure (auth,
+        # network) triggers a create attempt; consider narrowing to
+        # NotFound as create_table does.
+        except Exception as err:
+            self.client.create_dataset(dataset)
+        return dataset
+
+    def create_table(self, dataset_id, table_id, schema):
+        """Creates a new table if it doesn't exist.
+
+    Arguments:
+      dataset_id: the name of the dataset that will contain the table you want
+      to create.
+      table_id: the name of the table you want to create.
+      schema: a schema array for the table to be created.
+    Returns:
+      table: the resulting table object
+    """
+        dataset = self.create_dataset(dataset_id)
+        table_ref = dataset.table(table_id)
+        table = bigquery.Table(table_ref, schema=schema)
+        try:
+            # Prefer the remote table (with its existing schema) when it
+            # already exists.
+            table = self.client.get_table(table_ref)
+        except NotFound:
+            self.client.create_table(table)
+        return table
+
+    def update_table_schema(self, dataset_id, table_id, new_schema):
+        """Updates the schema for the given remote table.
+
+    Uses fields specified in self.schema_dict. This method will never remove
+    fields, to avoid loss of data.
+
+    Arguments:
+      dataset_id: the dataset containing the table to modify.
+      table_id: the table to modify.
+      new_schema: a new schema to update the remote table's schema with.
+    Returns:
+      table: the updated table object.
+      changed_fields: a dictionary mapping any changed field names to their new name strings.
+    """
+        table = self.create_table(dataset_id, table_id, new_schema)
+        remote_schema = table.schema
+        remote_schema_dict = get_dict_from_schema(remote_schema)
+        new_schema_dict = get_dict_from_schema(new_schema)
+
+        # Remote fields always win; local conflicting fields get renamed.
+        updated_schema_dict, changed_fields = reconcile_schema_differences(
+            new_schema_dict, remote_schema_dict)
+
+        if updated_schema_dict.items() != remote_schema_dict.items():
+            table.schema = get_schema_from_dict(updated_schema_dict)
+            # NOTE(review): the 'properties' keyword matches an older
+            # google-cloud-bigquery API (newer releases use a positional
+            # fields list) — confirm against the pinned client version.
+            table = self.client.update_table(
+                table=table, properties=['schema'])
+
+        return table, changed_fields
+
+    def delete(self, dataset_id, table_id=None):
+        """Deletes specified table in specified dataset.
+
+    If table_id is omitted, the whole dataset is deleted instead.
+
+    Arguments:
+      dataset_id: the name of the dataset to be deleted or the dataset that
+      contains the table to be deleted.
+      table_id: the name of the table to be deleted.
+    """
+        dataset_ref = self.client.dataset(dataset_id)
+        dataset = bigquery.Dataset(dataset_ref)
+        try:
+            if table_id:
+                table_ref = dataset.table(table_id)
+                table = bigquery.Table(table_ref)
+                self.client.delete_table(table)
+            else:
+                self.client.delete_dataset(dataset)
+        except NotFound:
+            # Already gone; deletion is idempotent.
+            pass
+
+    def flush(self, rows_list, dataset_id, table_id, retries=5):
+        """Inserts key value store of data into the specified table.
+
+    Arguments:
+      rows_list: a list of row dictionaries to send to BigQuery
+      dataset_id: dataset name to store table in.
+      table_id: table name to store info in.
+      retries: how many times to retry insert upon failure
+    Returns:
+      errors: any errors resulting from the insert operation.
+    Raises:
+      DataNotStoredError: if data is not stored because of insertErrors in
+      query response or timeout.
+    """
+        # Re-sanitize rows so field names/values are BigQuery-legal.
+        correctly_formatted_rows_list = []
+
+        for row in rows_list:
+            add_row(row, correctly_formatted_rows_list)
+
+        local_schema = get_schema_from_rows_list(correctly_formatted_rows_list)
+        table, changed_fields = self.update_table_schema(
+            dataset_id, table_id, local_schema)
+
+        if changed_fields:
+            # Remote schema forced renames; rewrite the local rows to match.
+            print('Changed Fields: ' + str(changed_fields))
+            for old_name, new_name in changed_fields.items():
+                change_field_name(old_name, new_name,
+                                  correctly_formatted_rows_list)
+
+        schema = table.schema
+
+        values = get_formatted_rows(correctly_formatted_rows_list, schema)
+        # NOTE(review): create_rows is the pre-1.0 streaming-insert API
+        # (renamed insert_rows later) — confirm against the pinned client
+        # version.
+        errors = self.client.create_rows(table, values)
+        if errors:
+            # Simple fixed-backoff retry loop: 30s between attempts.
+            for retry in range(retries):
+                print('Retry ' + str(retry + 1))
+                time.sleep(30)
+                errors = self.client.create_rows(table, values)
+                if not errors:
+                    break
+
+        if errors:
+            print(errors)
+        return errors
+
+
+####################
+# Subprocess and helper methods to help with automated logger
+####################
+
+
+def start_queue_server(queue_size, ip_address, port, authkey):
+    """Starts a subprocess bigquery_logger_server.py.
+  Subprocess creates a server to handle the shared job queue.
+
+  Arguments:
+    queue_size: maximum number of items this queue can hold
+    ip_address: ip address of the machine on which to start queue management server
+    port: port on which to reach queue management server
+    authkey: password to be used by clients trying to access server
+  Returns:
+    process: the result of Popen on the subprocess.
+  """
+
+    # If ip_address is empty string (signifying local machine) we need to have '' in the command so it is counted
+    # as an actual argument to bigquery_logger_server
+    ip_address = ip_address or '\'\''
+    # NOTE(review): the command runs through the shell and the arguments are
+    # not escaped — authkey must not contain shell metacharacters.
+    command = ' '.join([
+        _SERVER_SCRIPT,
+        str(queue_size),
+        str(ip_address),
+        str(port),
+        str(authkey)
+    ])
+    # Create error log file for user to check
+    # NOTE(review): error_log is never closed here; presumably kept open for
+    # the child to write into — verify.
+    error_log_name = os.path.join(
+        os.path.dirname(__file__), 'queue_server_err.log')
+    error_log = open(error_log_name, 'w+')
+    process = subprocess.Popen(
+        command,
+        shell=True,
+        stderr=error_log,
+        stdin=subprocess.PIPE,
+        stdout=subprocess.PIPE)
+    return process
+
+
+def start_scheduled_automatic_logger(ip_address, port, authkey, project_id,
+                                     credentials_path):
+    """Starts a subprocess bigquery_scheduled_automatic_logger.
+  Subprocess accesses the queue managed by the server at ip_address:port
+  and periodically sends items in queue to the BigQuery project identified by project_id.
+
+  Arguments:
+    ip_address: ip_address of the machine on which the server managing the shared queue to pull from is located
+    port: port on which the server managing the shared queue to pull from can be reached
+    authkey: password needed to access server
+    project_id: name of BigQuery project to send data to
+    credentials_path: path to directory where Google Service Account credentials for this BigQuery
+      project are stored
+  Returns:
+    process: the result of Popen on the subprocess.
+  """
+
+    # If ip_address is empty string (signifying local machine) we need to have '' in the command so it is counted
+    # as an actual argument to bigquery_scheduled_automatic_logger
+    ip_address = ip_address or '\'\''
+    print('starting scheduled automatic logger...')
+    # NOTE(review): the command runs through the shell and the arguments are
+    # not escaped — authkey must not contain shell metacharacters.
+    command = ' '.join([
+        _AUTOMATIC_LOGGER_SCRIPT,
+        str(ip_address),
+        str(port),
+        str(authkey),
+        str(project_id),
+        str(credentials_path)
+    ])
+    # Create error log file for user to check
+    # NOTE(review): error_log is never closed here; presumably kept open for
+    # the child to write into — verify.
+    error_log_name = os.path.join(
+        os.path.dirname(__file__), 'scheduled_automatic_logger_err.log')
+    error_log = open(error_log_name, 'w+')
+    process = subprocess.Popen(
+        command,
+        shell=True,
+        stderr=error_log,
+        stdin=subprocess.PIPE,
+        stdout=subprocess.PIPE)
+    return process
+
+
+def get_queue(ip_address, port, authkey):
+    """Returns a proxy object for shared queue.
+  Shared queue is created and managed in start_server().
+
+  Arguments:
+    ip_address: ip_address of the machine on which the server managing the shared queue to proxy is located
+    port: port on which the server managing the shared queue to proxy can be reached
+    authkey: password needed to access server
+  Returns:
+    queue: the BigqueryLoggerQueue object that organizers and holds all BigQuery
+      inserts sent to server."""
+    BaseManager.register('get_queue')
+    m = BaseManager(address=(ip_address, int(port)), authkey=authkey)
+    try:
+        m.connect()
+        return m.get_queue()
+    except socket.error:
+        raise BigQueryLoggerUtilsError('Cannot connect to data storage queue.')
+
+
+def get_current_scheduled_automatic_logger():
+    """Returns process id and args of running scheduled automatic logger"""
+
+    processes = get_processes(_AUTOMATIC_LOGGER_SCRIPT)
+
+    pid = 0
+    args = {}
+    if processes:
+        process = processes[0]
+        pid = process[0]
+        process_argspec = inspect.getargspec(start_scheduled_automatic_logger)
+        process_arg_names = process_argspec.args
+        process_argv = process[-1 * len(process_arg_names):]
+        args = dict(zip(process_arg_names, process_argv))
+
+    return pid, args
+
+
+def get_current_logger_server():
+    """Returns process id and args of running logger servers"""
+
+    processes = get_processes(_SERVER_SCRIPT)
+
+    pid = 0
+    args = {}
+    if processes:
+        process = processes[0]
+        pid = process[0]
+        process_argspec = inspect.getargspec(start_queue_server)
+        process_arg_names = process_argspec.args
+        process_argv = process[-1 * len(process_arg_names):]
+        args = dict(zip(process_arg_names, process_argv))
+
+    return pid, args
+
+
+def get_current_queue_and_server_pid():
+    """Kills the current running queue server process.
+
+  Returns:
+    queue: the queue that the server used to serve.
+  """
+
+    pid, args = get_current_logger_server()
+    get_queue_args = inspect.getargspec(get_queue).args
+    if pid:
+        try:
+            kwargs = {arg_name: args[arg_name] for arg_name in get_queue_args}
+        except KeyError:
+            raise BigQueryLoggerUtilsError(
+                'Param names in get_queue %s must be subset of param names for start_queue_server %s'
+                % (get_queue_args, args.keys()))
+        else:
+            # Retrieve reference to current
+            queue = get_queue(**kwargs)
+            return pid, queue
+
+
+def kill_current_scheduled_automatic_logger():
+    pid, _ = get_current_scheduled_automatic_logger()
+    if pid:
+        kill_pid(pid)
+
+
+def get_scheduled_automatic_logger_pid(ip_address, port, authkey, project_id,
+                                       credentials_path):
+    """Returns the process id of a bigquery_scheduled_automatic_logger instance for a given set of configs.
+
+  Arguments:
+    ip_address: ip_address of the machine on which the server managing the shared queue to pull from is located
+    port: port on which the server managing the shared queue to pull from can be reached
+    authkey: password needed to access server
+    project_id: name of BigQuery project to send data to
+    credentials_path: path to directory where Google Service Account credentials for this BigQuery
+      project are stored
+  Returns:
+    pid: process id of process if found. Else 0
+  """
+
+    pids = get_pids(_AUTOMATIC_LOGGER_SCRIPT, ip_address, port, authkey,
+                    project_id, os.path.expanduser(credentials_path))
+
+    pid = 0
+    if pids:
+        pid = pids[0]
+    return pid
+
+
+def get_logger_server_pid(queue_size, ip_address, port, authkey):
+    """Returns the process id of a bigquery_logger_service instance for a given set of configs.
+
+  Arguments:
+    queue_size: the size of the shared data queue
+    ip_address: ip_address of the machine on which the server managing the shared queue to pull from is located
+    port: port on which the server managing the shared queue to pull from can be reached
+    authkey: password needed to access server
+  Returns:
+    pid: process id of process if found. Else 0
+  """
+
+    pids = get_pids(_SERVER_SCRIPT, queue_size, ip_address, port, authkey)
+    pid = 0
+    if pids:
+        pid = pids[0]
+    return pid
+
+
+def get_pids(*argv):
+    """Gets process ids based on arguments to concatenate and grep
+
+  Arguments:
+    *argv: any number of arguments to be joined and grepped
+  Returns:
+    pids: process ids of process if found.
+  """
+    processes = get_processes(*argv)
+    pids = [process[0] for process in processes]
+
+    return pids
+
+
+def get_processes(*argv):
+    """Returns process grepped by a set of arguments.
+
+  Arguments:
+    *argv: any number of arguments to be joined and grepped
+  Returns:
+    processes: processes returned by grep, as a list of lists.
+  """
+    expression = ' '.join([str(arg) for arg in argv])
+    processes = []
+    try:
+        results = subprocess.check_output(
+            'pgrep -af \"%s\"' % expression, shell=True)
+        for result in results.split('\n'):
+            items = result.split(' ')
+            if 'pgrep' not in items:
+                processes.append(items)
+    except subprocess.CalledProcessError:
+        pass
+
+    return processes
+
+
+def kill_pid(pid):
+    """To only be used on _SERVER_SCRIPT or _AUTOMATIC_LOGGER_SCRIPT"""
+
+    result = subprocess.check_output('kill -9 %s' % str(pid), shell=True)
+    return result
diff --git a/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/bigquery_scheduled_automatic_client.py b/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/bigquery_scheduled_automatic_client.py
new file mode 100644
index 0000000..a1ec395
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/bigquery_scheduled_automatic_client.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+"""Script that runs perpetually, flushing contents of shared BigqueryLoggerQueue
+to BigQuery on a specified schedule."""
+
+import sys
+import time
+
+import acts.controllers.buds_lib.data_storage.bigquery.bigquery_logger_utils as utils
+
+
+def start_scheduled_automatic_logging(queue, project_id, credentials_path):
+    """Runs infinite while loop that flushes contents of queue to BigQuery
+  on schedule determined by flush_period."""
+
+    client = utils.BigqueryLoggerClient(project_id, credentials_path)
+
+    while True:
+        # TODO: check if connected to internet before attempting to push to BQ
+        insert_iterator = queue.get_insert_iterator()
+        for dataset_table_tuple, rows_list in insert_iterator:
+            dataset_id, table_id = dataset_table_tuple
+            client.flush(rows_list, dataset_id, table_id)
+
+        time.sleep(queue.get_flush_period())
+
+
+def main():
+    """Pass shared BigqueryLoggerQueue to automatic logging method."""
+    ip_address, port, authkey, project_id, credentials_path = sys.argv[1:]
+    queue = utils.get_queue(ip_address, port, authkey)
+    start_scheduled_automatic_logging(queue, project_id, credentials_path)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/config.yml b/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/config.yml
new file mode 100644
index 0000000..12fe4a4
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/config.yml
@@ -0,0 +1,7 @@
+project_id: 'google.com:wearables-qa'
+credentials_path: '/google/data/ro/teams/wearables/test/automation/bigquery/wearables-service-key.json'
+queue_size: 30000
+ip_address: ''
+port: 60009
+authkey: 'wearables'
+flush_period: 5
\ No newline at end of file
diff --git a/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/test_bigquery_logger.py b/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/test_bigquery_logger.py
new file mode 100644
index 0000000..e19b6cc
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/test_bigquery_logger.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+"""Tests for bigquery_logger."""
+
+import acts.controllers.buds_lib.data_storage.bigquery.bigquery_logger
+
+logger = bigquery_logger.BigqueryLogger(dataset_id='test', table_id='test')
+
+
+def test_with_block():
+    with bigquery_logger.BigqueryLogger('with_block_test',
+                                        'test_table') as log:
+        log.add_row({'NEW': 'nice', 'FIELD6': 3.0, 'noodle': 3})
+        log.add_row({'FIELD2': 12, 'FIELD3': True, 'SUPERNEW': 'stroong'})
diff --git a/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/test_bigquery_utils.py b/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/test_bigquery_utils.py
new file mode 100644
index 0000000..f6855a4
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/data_storage/bigquery/test_bigquery_utils.py
@@ -0,0 +1,510 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+from google.api_core.exceptions import NotFound
+from google.cloud import bigquery
+from mock import patch, Mock
+
+import acts.controllers.buds_lib.data_storage.bigquery.bigquery_logger_utils as utils
+
+_TIMESTAMP_STR_FORMAT = '%Y-%m-%d %H:%M:%S'
+
+
+def test_make_storable():
+    to_make_storable = ['one', 1, 1.0, True, [1]]
+    targets = ['one', 1, 1.0, True, str([1])]
+    assert [utils.make_storeable(item) for item in to_make_storable] == targets
+
+
+def test_get_field_name():
+    bad_names = [
+        'all_lowercase', 'b@d<h4r^c7=r$', '5tarts_with_digit', '_underscore',
+        '', 'hyphen-name'
+    ]
+    targets = [
+        'ALL_LOWERCASE', 'BDH4RC7R', 'FIELD_5TARTS_WITH_DIGIT', '_UNDERSCORE',
+        'FIELD', 'HYPHENNAME'
+    ]
+    assert [utils.get_field_name(item) for item in bad_names] == targets
+
+
+def test_get_bigquery_type():
+    items = ['one', '2017-11-03 12:30:00', 1, 1.0, True, utils]
+    targets = ['STRING', 'TIMESTAMP', 'INTEGER', 'FLOAT', 'BOOLEAN', 'STRING']
+    assert [utils.get_bigquery_type(item) for item in items] == targets
+
+
+def test_add_row():
+    row_list = []
+    utils.add_row({'int': 500, 'list': [1, 2, 3], 'float': 5.0}, row_list)
+    assert set(row_list[0].items()) == set({
+        'INT': 500,
+        'LIST': '[1, 2, 3]',
+        'FLOAT': 5.0
+    }.items())
+    utils.add_row({'int': 12, 'time': '2011-12-13 10:00:00'}, row_list)
+    assert set(row_list[1].items()) == set({
+        'INT': 12,
+        'TIME': '2011-12-13 10:00:00'
+    }.items())
+    utils.add_row({'1string': '1'}, row_list)
+    assert set(row_list[2].items()) == set({'FIELD_1STRING': '1'}.items())
+
+
+def test_change_field_name():
+    row_list = [{
+        'FIELD1': None,
+        'FIELD2': 300,
+        'FIELD3': True
+    }, {
+        'FIELD1': 'a string',
+        'FIELD2': 300,
+        'FIELD4': False
+    }, {
+        'FIELD1': 'another string',
+        'FIELD3': True,
+        'FIELD4': False
+    }]
+    num_replacements = utils.change_field_name('field1', 'new_name', row_list)
+    assert num_replacements == 3
+    assert set(row_list[0].items()) == set({
+        'NEW_NAME': None,
+        'FIELD2': 300,
+        'FIELD3': True
+    }.items())
+    assert set(row_list[1].items()) == set({
+        'NEW_NAME': 'a string',
+        'FIELD2': 300,
+        'FIELD4': False
+    }.items())
+    assert set(row_list[2].items()) == set({
+        'NEW_NAME': 'another string',
+        'FIELD3': True,
+        'FIELD4': False
+    }.items())
+    num_replacements = utils.change_field_name('field2', 'new_name2', row_list)
+    assert num_replacements == 2
+    assert set(row_list[0].items()) == set({
+        'NEW_NAME': None,
+        'NEW_NAME2': 300,
+        'FIELD3': True
+    }.items())
+    assert set(row_list[1].items()) == set({
+        'NEW_NAME': 'a string',
+        'NEW_NAME2': 300,
+        'FIELD4': False
+    }.items())
+    assert set(row_list[2].items()) == set({
+        'NEW_NAME': 'another string',
+        'FIELD3': True,
+        'FIELD4': False
+    }.items())
+    num_replacements = utils.change_field_name('field5', 'new_name3', row_list)
+    assert num_replacements == 0
+    assert set(row_list[0].items()) == set({
+        'NEW_NAME': None,
+        'NEW_NAME2': 300,
+        'FIELD3': True
+    }.items())
+    assert set(row_list[1].items()) == set({
+        'NEW_NAME': 'a string',
+        'NEW_NAME2': 300,
+        'FIELD4': False
+    }.items())
+    assert set(row_list[2].items()) == set({
+        'NEW_NAME': 'another string',
+        'FIELD3': True,
+        'FIELD4': False
+    }.items())
+
+
+def test_get_schema_from_dict():
+    dict = {'FIELD': 'STRING', 'IELD': 'BOOLEAN', 'ELD': 'TIMESTAMP'}
+    target = [
+        bigquery.SchemaField('ELD', 'TIMESTAMP', mode='nullable'),
+        bigquery.SchemaField('FIELD', 'STRING', mode='nullable'),
+        bigquery.SchemaField('IELD', 'BOOLEAN', mode='nullable')
+    ]
+    assert utils.get_schema_from_dict(dict) == target
+
+
+def test_get_dict_from_schema():
+    schema = [
+        bigquery.SchemaField('a_float'.upper(), 'FLOAT'),
+        bigquery.SchemaField('an_int'.upper(), 'INTEGER'),
+        bigquery.SchemaField('a_string'.upper(), 'STRING'),
+        bigquery.SchemaField('a_timestamp'.upper(), 'TIMESTAMP'),
+        bigquery.SchemaField('a_boolean'.upper(), 'BOOLEAN'),
+        bigquery.SchemaField('unknown'.upper(), 'STRING')
+    ]
+
+    dictionary = {
+        'a_float'.upper(): 'FLOAT',
+        'an_int'.upper(): 'INTEGER',
+        'a_string'.upper(): 'STRING',
+        'a_timestamp'.upper(): 'TIMESTAMP',
+        'a_boolean'.upper(): 'BOOLEAN',
+        'unknown'.upper(): 'STRING'
+    }
+
+    assert dictionary.items() == utils.get_dict_from_schema(schema).items()
+
+
+def test_reconcile_schema_differences():
+    schema_to_change = {
+        'FIELD1': 'TIMESTAMP',
+        'FIELD2': 'INTEGER',
+        'FIELD3': 'FLOAT',
+        'FIELD4': 'STRING',
+        'FIELD5': 'BOOLEAN',
+        'FIELD6': 'STRING'
+    }
+    schema_to_preserve = {
+        'FIELD1': 'TIMESTAMP',
+        'FIELD2': 'FLOAT',
+        'FIELD3_FLOAT': 'TIMESTAMP',
+        'FIELD3': 'BOOLEAN',
+        'FIELD5': 'TIMESTAMP',
+        'FIELD7': 'TIMESTAMP'
+    }
+    target_schema = {
+        'FIELD1': 'TIMESTAMP',
+        'FIELD2': 'FLOAT',
+        'FIELD2_INTEGER': 'INTEGER',
+        'FIELD3': 'BOOLEAN',
+        'FIELD3_FLOAT': 'TIMESTAMP',
+        'FIELD3_FLOAT1': 'FLOAT',
+        'FIELD4': 'STRING',
+        'FIELD5': 'TIMESTAMP',
+        'FIELD5_BOOLEAN': 'BOOLEAN',
+        'FIELD6': 'STRING',
+        'FIELD7': 'TIMESTAMP'
+    }
+    assert utils.reconcile_schema_differences(
+        schema_to_change,
+        schema_to_preserve)[0].items() == target_schema.items()
+
+
+def test_get_tuple_from_schema():
+    schema = [
+        bigquery.SchemaField('FIELD1', 'BOOLEAN', mode='nullable'),
+        bigquery.SchemaField('FIELD2', 'INTEGER', mode='nullable'),
+        bigquery.SchemaField('FIELD3', 'STRING', mode='nullable'),
+        bigquery.SchemaField('FIELD4', 'TIMESTAMP', mode='nullable'),
+        bigquery.SchemaField('FIELD5', 'FLOAT', mode='nullable')
+    ]
+    target = ('FIELD1', 'FIELD2', 'FIELD3', 'FIELD4', 'FIELD5')
+    assert utils.get_tuple_from_schema(schema) == target
+
+
+def test_get_schema_from_rows_list():
+    row_list = [{
+        'FIELD1': None,
+        'FIELD2': 300,
+        'FIELD3': True
+    }, {
+        'FIELD1': 'a string',
+        'FIELD2': 300.0,
+        'FIELD4': False
+    }, {
+        'FIELD1': 'another string',
+        'FIELD3': True,
+        'FIELD4': False
+    }]
+    schema = [
+        bigquery.SchemaField('FIELD1', 'STRING', mode='nullable'),
+        bigquery.SchemaField('FIELD2', 'STRING', mode='nullable'),
+        bigquery.SchemaField('FIELD3', 'BOOLEAN', mode='nullable'),
+        bigquery.SchemaField('FIELD4', 'BOOLEAN', mode='nullable')
+    ]
+    assert utils.get_schema_from_rows_list(row_list) == schema
+
+
+def test_get_formatted_rows():
+    row_list = [{
+        'FIELD1': None,
+        'FIELD2': 300,
+        'FIELD3': True
+    }, {
+        'FIELD1': 'a string',
+        'FIELD2': 300.0,
+        'FIELD4': False
+    }, {
+        'FIELD1': 'another string',
+        'FIELD3': True,
+        'FIELD4': False
+    }]
+    schema = (bigquery.SchemaField('FIELD5', 'TIMESTAMP', mode='nullable'),
+              bigquery.SchemaField('FIELD4', 'BOOLEAN', mode='nullable'),
+              bigquery.SchemaField('FIELD3.5', 'INTEGER', mode='nullable'),
+              bigquery.SchemaField('FIELD3', 'BOOLEAN', mode='nullable'),
+              bigquery.SchemaField('FIELD2', 'STRING', mode='nullable'),
+              bigquery.SchemaField('FIELD1', 'STRING', mode='nullable'))
+    target = [(None, None, None, True, 300, None), (None, False, None, None,
+                                                    300.0, 'a string'),
+              (None, False, None, True, None, 'another string')]
+    assert utils.get_formatted_rows(row_list, schema) == target
+
+
+class Client:
+    def get_dataset(self, name):
+        if name == 'existing_dataset':
+            return Dataset(name)
+        else:
+            raise NotFound('')
+
+    def create_dataset(self, dataset):
+        return dataset
+
+    def dataset(self, name):
+        return name
+
+    def delete_dataset(self, dataset):
+        return 'deleted dataset ' + dataset.name
+
+    def get_table(self, name):
+        if name == 'existing_table':
+            return Table(name, [])
+        else:
+            raise NotFound('')
+
+    def create_table(self, table):
+        return table
+
+    def update_table(self, table, properties):
+        return Table(table.name + '_changed', table.schema)
+
+    def delete_table(self, table):
+        return 'deleted table ' + table.name
+
+    def create_rows(self, table, rows):
+        if table.name == 'bad_table':
+            return ['errors']
+        return []
+
+
+class Dataset:
+    def __init__(self, name):
+        self.name = name
+
+    def __eq__(self, other):
+        return self.name == other.name
+
+    def table(self, name):
+        return name
+
+
+class Table:
+    def __init__(self, name, schema):
+        self.name = name
+        self.schema = schema
+
+    def __eq__(self, other):
+        return self.name == other.name and set(self.schema) == set(
+            other.schema)
+
+    def __str__(self):
+        return 'NAME: %s\nSCHEMA: %s' % (self.name, str(self.schema))
+
+
+@patch('infra.data_storage.bigquery.bigquery_logger_utils.bigquery.Dataset')
+@patch('infra.data_storage.bigquery.bigquery_logger_utils.bigquery.Client')
+def test_create_dataset_already_exists(mock_client, mock_dataset):
+    mock_client.return_value = Client()
+    client = utils.BigqueryLoggerClient('', '')
+    dataset = client.create_dataset('existing_dataset')
+    assert dataset == Dataset('existing_dataset')
+
+
+@patch('infra.data_storage.bigquery.bigquery_logger_utils.bigquery.Dataset')
+@patch('infra.data_storage.bigquery.bigquery_logger_utils.bigquery.Client')
+def test_create_dataset_does_not_exist(mock_client, mock_dataset):
+    mock_client.return_value = Client()
+    mock_dataset.return_value = Dataset('new_dataset')
+    client = utils.BigqueryLoggerClient('', '')
+    dataset = client.create_dataset('new_dataset')
+    assert dataset == Dataset('new_dataset')
+
+
+@patch('infra.data_storage.bigquery.bigquery_logger_utils.bigquery.Table')
+@patch(
+    'infra.data_storage.bigquery.bigquery_logger_utils.BigqueryLoggerClient.create_dataset'
+)
+@patch('infra.data_storage.bigquery.bigquery_logger_utils.bigquery.Client')
+def test_create_table_already_exists(mock_client, mock_dataset, mock_table):
+    mock_client.return_value = Client()
+    mock_dataset.return_value = Dataset('existing_dataset')
+    schema = {
+        bigquery.SchemaField('FIELD1', 'STRING', mode='nullable'),
+        bigquery.SchemaField('FIELD2', 'BOOLEAN', mode='nullable'),
+        bigquery.SchemaField('FIELD3', 'TIMESTAMP', mode='nullable')
+    }
+    mock_table.return_value = Table('existing_table', schema)
+    client = utils.BigqueryLoggerClient('', '')
+    table = client.create_table('existing_dataset', 'existing_table', schema)
+    assert table == Table('existing_table', [])
+
+
+@patch('infra.data_storage.bigquery.bigquery_logger_utils.bigquery.Table')
+@patch(
+    'infra.data_storage.bigquery.bigquery_logger_utils.BigqueryLoggerClient.create_dataset'
+)
+@patch('infra.data_storage.bigquery.bigquery_logger_utils.bigquery.Client')
+def test_create_table_does_not_exist(mock_client, mock_dataset, mock_table):
+    mock_client.return_value = Client()
+    mock_dataset.return_value = Dataset('existing_dataset')
+    schema = {
+        bigquery.SchemaField('FIELD1', 'STRING', mode='nullable'),
+        bigquery.SchemaField('FIELD2', 'BOOLEAN', mode='nullable'),
+        bigquery.SchemaField('FIELD3', 'TIMESTAMP', mode='nullable')
+    }
+    mock_table.return_value = Table('new_table', schema)
+    client = utils.BigqueryLoggerClient('', '')
+    table = client.create_table('existing_dataset', 'new_table', schema)
+    assert table == Table('new_table', schema)
+
+
+@patch(
+    'infra.data_storage.bigquery.bigquery_logger_utils.BigqueryLoggerClient.create_table'
+)
+@patch('infra.data_storage.bigquery.bigquery_logger_utils.bigquery.Client')
+def test_update_table_schema(mock_client, mock_table):
+    mock_client.return_value = Client()
+    schema = {
+        bigquery.SchemaField('FIELD1', 'STRING', mode='nullable'),
+        bigquery.SchemaField('FIELD2', 'BOOLEAN', mode='nullable'),
+        bigquery.SchemaField('FIELD3', 'TIMESTAMP', mode='nullable')
+    }
+    mock_table.return_value = Table('existing_table', schema)
+    new_schema = {
+        bigquery.SchemaField('FIELD1', 'INTEGER', mode='nullable'),
+        bigquery.SchemaField('FIELD2', 'BOOLEAN', mode='nullable'),
+        bigquery.SchemaField('FIELD5', 'FLOAT', mode='nullable')
+    }
+    client = utils.BigqueryLoggerClient('', '')
+    table, changed_fields = client.update_table_schema(
+        'existing_dataset', 'existing_table', new_schema)
+    print(table)
+    assert table == Table(
+        'existing_table_changed', {
+            bigquery.SchemaField('FIELD1_INTEGER', 'INTEGER', mode='nullable'),
+            bigquery.SchemaField('FIELD1', 'STRING', mode='nullable'),
+            bigquery.SchemaField('FIELD2', 'BOOLEAN', mode='nullable'),
+            bigquery.SchemaField('FIELD3', 'TIMESTAMP', mode='nullable'),
+            bigquery.SchemaField('FIELD5', 'FLOAT', mode='nullable')
+        })
+    assert set(changed_fields.items()) == set({
+        'FIELD1': 'FIELD1_INTEGER'
+    }.items())
+
+
+@patch(
+    'infra.data_storage.bigquery.bigquery_logger_utils.BigqueryLoggerClient.create_table'
+)
+@patch('infra.data_storage.bigquery.bigquery_logger_utils.bigquery.Client')
+def test_update_table_schema_no_change(mock_client, mock_table):
+    mock_client.return_value = Client()
+    schema = {
+        bigquery.SchemaField('FIELD1', 'STRING', mode='nullable'),
+        bigquery.SchemaField('FIELD2', 'BOOLEAN', mode='nullable'),
+        bigquery.SchemaField('FIELD3', 'TIMESTAMP', mode='nullable')
+    }
+    mock_table.return_value = Table('existing_table', schema)
+    new_schema = {
+        bigquery.SchemaField('FIELD1', 'STRING', mode='nullable'),
+        bigquery.SchemaField('FIELD2', 'BOOLEAN', mode='nullable')
+    }
+    client = utils.BigqueryLoggerClient('', '')
+    table, changed_fields = client.update_table_schema(
+        'existing_dataset', 'existing_table', new_schema)
+    print(table)
+    assert table == Table(
+        'existing_table', {
+            bigquery.SchemaField('FIELD1', 'STRING', mode='nullable'),
+            bigquery.SchemaField('FIELD2', 'BOOLEAN', mode='nullable'),
+            bigquery.SchemaField('FIELD3', 'TIMESTAMP', mode='nullable')
+        })
+    assert set(changed_fields.items()) == set({}.items())
+
+
+@patch('infra.data_storage.bigquery.test_bigquery_utils.Client.delete_dataset')
+@patch('infra.data_storage.bigquery.bigquery_logger_utils.bigquery.Dataset')
+@patch('infra.data_storage.bigquery.bigquery_logger_utils.bigquery.Client')
+def test_delete_dataset(mock_client, mock_dataset, mock_delete_dataset):
+    mock_client.return_value = Client()
+    ds = Dataset('existing_dataset')
+    mock_dataset.return_value = ds
+    client = utils.BigqueryLoggerClient('', '')
+    client.delete('existing_dataset')
+    mock_delete_dataset.assert_called_with(ds)
+
+
+@patch('infra.data_storage.bigquery.test_bigquery_utils.Client.delete_table')
+@patch('infra.data_storage.bigquery.bigquery_logger_utils.bigquery.Table')
+@patch('infra.data_storage.bigquery.bigquery_logger_utils.bigquery.Dataset')
+@patch('infra.data_storage.bigquery.bigquery_logger_utils.bigquery.Client')
+def test_delete_dataset(mock_client, mock_dataset, mock_table,
+                        mock_delete_table):
+    mock_client.return_value = Client()
+    schema = {
+        bigquery.SchemaField('FIELD1', 'STRING', mode='nullable'),
+        bigquery.SchemaField('FIELD2', 'BOOLEAN', mode='nullable'),
+        bigquery.SchemaField('FIELD3', 'TIMESTAMP', mode='nullable')
+    }
+    tb = Table('existing_table', schema)
+    mock_table.return_value = tb
+    client = utils.BigqueryLoggerClient('', '')
+    client.delete('existing_dataset', 'existing_table')
+    mock_delete_table.assert_called_with(tb)
+
+
+@patch('infra.data_storage.bigquery.test_bigquery_utils.Client.create_rows')
+@patch(
+    'infra.data_storage.bigquery.test_bigquery_utils.utils.get_schema_from_rows_list'
+)
+@patch(
+    'infra.data_storage.bigquery.test_bigquery_utils.utils.change_field_name')
+@patch(
+    'infra.data_storage.bigquery.test_bigquery_utils.utils.BigqueryLoggerClient.update_table_schema'
+)
+@patch('infra.data_storage.bigquery.bigquery_logger_utils.bigquery.Client')
+def test_flush(mock_client, mock_update_table_schema, mock_change_field_name,
+               mock_get_schema, mock_create_rows):
+    mock_create_rows.return_value = []
+    mock_client.return_value = Client()
+    schema = {
+        bigquery.SchemaField('FIELD1', 'STRING', mode='nullable'),
+        bigquery.SchemaField('FIELD2', 'BOOLEAN', mode='nullable'),
+        bigquery.SchemaField('FIELD3', 'TIMESTAMP', mode='nullable')
+    }
+    tb = Table('existing_table', schema)
+    mock_update_table_schema.return_value = tb, {'FIELD1': 'NEW_NAME1'}
+    row_list = [{
+        'FIELD1': 1,
+        'FIELD2': False,
+        'FIELD3': 'result'
+    }, {
+        'FIELD1': 2,
+        'FIELD2': True
+    }, {
+        'FIELD1': 3,
+        'FIELD3': 'result'
+    }]
+    client = utils.BigqueryLoggerClient('', '')
+    errors = client.flush(row_list, 'existing_dataset', 'existing_table')
+    mock_change_field_name.assert_called_with('FIELD1', 'NEW_NAME1', row_list)
+    mock_create_rows.assert_called_once()
+    assert errors == []
diff --git a/acts/framework/acts/controllers/buds_lib/dev_utils/__init__.py b/acts/framework/acts/controllers/buds_lib/dev_utils/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/dev_utils/__init__.py
diff --git a/acts/framework/acts/controllers/buds_lib/dev_utils/apollo_log_decoder.py b/acts/framework/acts/controllers/buds_lib/dev_utils/apollo_log_decoder.py
new file mode 100644
index 0000000..c21387c
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/dev_utils/apollo_log_decoder.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python3
+#
+#   Copyright 2018 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+"""Decodes the protobufs described in go/apollo-qa-tracing-design."""
+
+import base64
+import binascii
+import struct
+
+from acts.controllers.buds_lib.dev_utils.proto.gen import apollo_qa_pb2
+from acts.controllers.buds_lib.dev_utils.proto.gen import audiowear_pb2
+
+
+def to_dictionary(proto):
+    proto_dic = {}
+    msg = [element.split(':') for element in str(proto).split('\n') if element]
+    for element in msg:
+        key = element[0].strip()
+        value = element[1].strip()
+        proto_dic[key] = value
+    return proto_dic
+
+
+def is_automation_protobuf(logline):
+    return logline.startswith('QA_MSG|')
+
+
+def decode(logline):
+    """Decode the logline.
+
+    Args:
+      logline: String line with the encoded message.
+
+    Returns:
+      String value with the decoded message.
+    """
+    decoded = None
+    decoders = {'HEX': binascii.unhexlify, 'B64': base64.decodebytes}
+    msgs = {
+        apollo_qa_pb2.TRACE:
+            apollo_qa_pb2.ApolloQATrace,
+        apollo_qa_pb2.GET_VER_RESPONSE:
+            apollo_qa_pb2.ApolloQAGetVerResponse,
+        apollo_qa_pb2.GET_CODEC_RESPONSE:
+            apollo_qa_pb2.ApolloQAGetCodecResponse,
+        apollo_qa_pb2.GET_DSP_STATUS_RESPONSE:
+            apollo_qa_pb2.ApolloQAGetDspStatusResponse,
+    }
+
+    if is_automation_protobuf(logline):
+        _, encoding, message = logline.split("|", 2)
+        message = message.rstrip()
+        if encoding in decoders.keys():
+            message = decoders[encoding](message)
+            header = message[0:4]
+            serialized = message[4:]
+            if len(header) == 4 and len(serialized) == len(message) - 4:
+                msg_group, msg_type, msg_len = struct.unpack('>BBH', header)
+                if (len(serialized) == msg_len and
+                        msg_group == audiowear_pb2.APOLLO_QA):
+                    proto = msgs[msg_type]()
+                    proto.ParseFromString(serialized)
+                    decoded = to_dictionary(proto)
+    return decoded
diff --git a/acts/framework/acts/controllers/buds_lib/dev_utils/apollo_log_regex.py b/acts/framework/acts/controllers/buds_lib/dev_utils/apollo_log_regex.py
new file mode 100644
index 0000000..658ed72
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/dev_utils/apollo_log_regex.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python3
+#
+#   Copyright 2018 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+"""Apollo's event logs regexp for each button action."""
+
+EVENT_REGEX = (
+  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)\r\n')
+VOL_CHANGE_REGEX = (
+  r'(?P<time_stamp>\d+)\sVolume = (?P<vol_level>\d+)(.*)\r\n')
+VOLUP_REGEX = (
+  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)3202(.*)\r\n')
+VOLDOWN_REGEX = (
+  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)320a(.*)\r\n')
+AVRCP_PLAY_REGEX = (r'(?P<time_stamp>\d+)\sAVRCP '
+                    r'play\r\n')
+AVRCP_PAUSE_REGEX = (r'(?P<time_stamp>\d+)\sAVRCP '
+                     r'paused\r\n')
+MIC_OPEN_EVENT_REGEX = (
+  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)'
+  r'\[3206\](.*)\r\n')
+MIC_CLOSE_EVENT_REGEX = (
+  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)'
+  r'\[3207\](.*)\r\n')
+PREV_TRACK_EVENT_REGEX = (
+  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)'
+  r'\[3208\](.*)\r\n')
+PREV_CHANNEL_EVENT_REGEX = (
+  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)'
+  r'\[3209\](.*)\r\n')
+NEXT_TRACK_EVENT_REGEX = (
+  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)'
+  r'\[3200\](.*)\r\n')
+NEXT_CHANNEL_EVENT_REGEX = (
+  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)'
+  r'\[3201\](.*)\r\n')
+FETCH_NOTIFICATION_EVENT_REGEX = (
+  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)State Match(.*)'
+  r'\[3205\](.*)\r\n')
+VOICE_CMD_COMPLETE_REGEX = (
+  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])\sDspOnVoiceCommandComplete\r\n')
+VOICE_CMD_START_REGEX = (
+  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])\sDspStartVoiceCommand(.*)\r\n')
+MIC_OPEN_PROMT_REGEX = (
+  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)AudioPromptPlay 33(.*)\r\n')
+MIC_CLOSE_PROMT_REGEX = (
+  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z])(.*)AudioPromptPlay 34(.*)\r\n')
+POWER_ON_EVENT_REGEX = (
+  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z]) --hello--(.*)PowerOn(.*)\r\n')
+POWER_OFF_EVENT_REGEX = (
+  r'(?P<time_stamp>\d+)\s(?P<log_level>[A-Z]) EvtAW:320d(.*)\r\n')
diff --git a/acts/framework/acts/controllers/buds_lib/dev_utils/apollo_sink_events.py b/acts/framework/acts/controllers/buds_lib/dev_utils/apollo_sink_events.py
new file mode 100644
index 0000000..3fe0823
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/dev_utils/apollo_sink_events.py
@@ -0,0 +1,221 @@
+#!/usr/bin/env python3
+#
+#   Copyright 2018 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+"""Apollo PS Keys and User Sink Events."""
+
+# Persistent Store (PS) Keys from rio_all_merged.psr.
+
+PSKEY = {
+    'PSKEY_BDADDR': '0x0001',
+    'PSKEY_DEVICE_NAME': '0x0108',
+    'PSKEY_DEEP_SLEEP_STATE': '0x0229',
+    'PSKEY_USB_VERSION': '0x02bc',
+    'PSKEY_USB_DEVICE_CLASS_CODES': '0x02bd',
+    'PSKEY_USB_VENDOR_ID': '0x02be',
+    'PSKEY_USB_PRODUCT_ID': '0x02bf',
+    'PSKEY_USB_PRODUCT_STRING': '0x02c2',
+    'PSKEY_USB_SERIAL_NUMBER_STRING': '0x02c3',
+    'PSKEY_EARCON_VERSION': '0x28b'
+}
+
+# Rio logging features from rio_log.h.
+
+LOG_FEATURES = {
+    'ALL': '0xffff',
+    'VERBOSE': '0x0001',
+    'TEST': '0x0002',
+    'CSR': '0x0004',
+    'DEBUG': '0x0008',
+    'INFO': '0x0010',
+    'ERROR': '0x0020',
+    'TIME_STAMP': '0x0040',
+}
+
+# Supported events from sink_events.h.
+
+SINK_EVENTS = {
+    'EventUsrMicOpen': '0x3206',
+    'EventUsrMicClose': '0x3207',
+    'EventUsrPowerOn': '0x4001',
+    'EventUsrPowerOff': '0x4002',
+    'EventUsrEnterPairing': '0x4003',
+    'EventUsrInitateVoiceDial': '0x4004',
+    'EventUsrInitateVoiceDial_AG2': '0x4005',
+    'EventUsrLastNumberRedial': '0x4006',
+    'EventUsrLastNumberRedial_AG2': '0x4007',
+    'EventUsrAnswer': '0x4008',
+    'EventUsrReject': '0x4009',
+    'EventUsrCancelEnd': '0x400A',
+    'EventUsrTransferToggle': '0x400B',
+    'EventUsrMuteToggle': '0x400C',
+    'EventUsrVolumeUp': '0x400D',
+    'EventUsrVolumeDown': '0x400E',
+    'EventUsrVolumeToggle': '0x400F',
+    'EventUsrThreeWayReleaseAllHeld': '0x4010',
+    'EventUsrThreeWayAcceptWaitingReleaseActive': '0x4011',
+    'EventUsrThreeWayAcceptWaitingHoldActive': '0x4012',
+    'EventUsrThreeWayAddHeldTo3Way': '0x4013',
+    'EventUsrThreeWayConnect2Disconnect': '0x4014',
+    'EventUsrLedsOnOffToggle': '0x4015',
+    'EventUsrLedsOn': '0x4016',
+    'EventUsrLedsOff': '0x4017',
+    'EventUsrEstablishSLC': '0x4018',
+    'EventUsrMuteOn': '0x4019',
+    'EventUsrMuteOff': '0x401A',
+    'EventUsrEnterTXContTestMode': '0x401B',
+    'EventUsrEnterDUTState': '0x401C',
+    'EventUsrBassBoostEnableDisableToggle': '0x401D',
+    'EventUsrPlaceIncomingCallOnHold': '0x401E',
+    'EventUsrAcceptHeldIncomingCall': '0x401F',
+    'EventUsrRejectHeldIncomingCall': '0x4020',
+    'EventUsrEnterDFUMode': '0x4021',
+    'EventUsrEnterDriverlessDFUMode': '0x4022',
+    'EventUsrEnterServiceMode': '0x4023',
+    'EventUsrAudioPromptsOn': '0x4024',
+    'EventUsrAudioPromptsOff': '0x4025',
+    'EventUsrDialStoredNumber': '0x4026',
+    'EventUsrUpdateStoredNumber': '0x4027',
+    'EventUsrRestoreDefaults': '0x4028',
+    'EventUsrConfirmationAccept': '0x4029',
+    'EventUsrConfirmationReject': '0x402A',
+    'EventUsrSelectAudioPromptLanguageMode': '0x402B',
+    'EventUsrSwitchAudioMode': '0x402F',
+    'EventUsrButtonLockingOn': '0x4030',
+    'EventUsrButtonLockingOff': '0x4031',
+    'EventUsrButtonLockingToggle': '0x4032',
+    'EventUsrRssiPair': '0x4034',
+    'EventUsrBassBoostOn': '0x4035',
+    'EventUsrBassBoostOff': '0x4036',
+    'EventUsr3DEnhancementOn': '0x4037',
+    'EventUsr3DEnhancementOff': '0x4038',
+    'EventUsrSelectAudioSourceNext': '0x4039',
+    'EventUsrSelectAudioSourceAnalog': '0x403A',
+    'EventUsrSelectAudioSourceUSB': '0x403B',
+    'EventUsrSelectAudioSourceAG1': '0x403C',
+    'EventUsrSelectAudioSourceAG2': '0x403D',
+    'EventUsrSelectFMAudioSource': '0x403E',
+    'EventUsrSelectAudioSourceNone': '0x403F',
+    'EventUsrPbapDialIch': '0x4040',
+    'EventUsrPbapDialMch': '0x4041',
+    'EventUsrIntelligentPowerManagementOn': '0x4042',
+    'EventUsrIntelligentPowerManagementOff': '0x4043',
+    'EventUsrIntelligentPowerManagementToggle': '0x4044',
+    'EventUsrAvrcpPlayPause': '0x4045',
+    'EventUsrAvrcpStop': '0x4046',
+    'EventUsrAvrcpSkipForward': '0x4047',
+    'EventUsrAvrcpSkipBackward': '0x4048',
+    'EventUsrAvrcpFastForwardPress': '0x4049',
+    'EventUsrAvrcpFastForwardRelease': '0x404A',
+    'EventUsrAvrcpRewindPress': '0x404B',
+    'EventUsrAvrcpRewindRelease': '0x404C',
+    'EventUsrAvrcpShuffleOff': '0x404D',
+    'EventUsrAvrcpShuffleAllTrack': '0x404E',
+    'EventUsrAvrcpShuffleGroup': '0x404F',
+    'EventUsrAvrcpRepeatOff': '0x4050',
+    'EventUsrAvrcpRepeatSingleTrack': '0x4051',
+    'EventUsrAvrcpRepeatAllTrack': '0x4052',
+    'EventUsrAvrcpRepeatGroup': '0x4053',
+    'EventUsrAvrcpPlay': '0x4054',
+    'EventUsrAvrcpPause': '0x4055',
+    'EventUsrAvrcpToggleActive': '0x4056',
+    'EventUsrAvrcpNextGroupPress': '0x4057',
+    'EventUsrAvrcpPreviousGroupPress': '0x4058',
+    'EventUsrPbapSetPhonebook': '0x4059',
+    'EventUsrPbapBrowseEntry': '0x405A',
+    'EventUsrPbapBrowseList': '0x405B',
+    'EventUsrPbapDownloadPhonebook': '0x405C',
+    'EventUsrPbapSelectPhonebookObject': '0x405D',
+    'EventUsrPbapBrowseComplete': '0x405E',
+    'EventUsrPbapGetPhonebookSize': '0x405F',
+    'EventUsrUsbPlayPause': '0x4060',
+    'EventUsrUsbStop': '0x4061',
+    'EventUsrUsbFwd': '0x4062',
+    'EventUsrUsbBack': '0x4063',
+    'EventUsrUsbMute': '0x4064',
+    'EventUsrUsbLowPowerMode': '0x4065',
+    'EventUsrTestModeAudio': '0x4066',
+    'EventUsrTestModeTone': '0x4067',
+    'EventUsrTestModeKey': '0x4068',
+    'EventUsrTestDefrag': '0x4069',
+    'EventUsrDebugKeysToggle': '0x406A',
+    'EventUsrSpeechRecognitionTuningStart': '0x406B',
+    'EventUsrWbsTestSetCodecs': '0x406C',
+    'EventUsrWbsTestOverrideResponse': '0x406D',
+    'EventUsrWbsTestSetCodecsSendBAC': '0x406E',
+    'EventUsrCreateAudioConnection': '0x406F',
+    'EventUsrSwapA2dpMediaChannel': '0x407F',
+    'EventUsrExternalMicConnected': '0x4080',
+    'EventUsrExternalMicDisconnected': '0x4081',
+    'EventUsrSSROn': '0x4082',
+    'EventUsrSSROff': '0x4083',
+    'EventUsrPeerSessionInquire': '0x4089',
+    'EventUsrPeerSessionConnDisc': '0x408A',
+    'EventUsrPeerSessionEnd': '0x408B',
+    'EventUsrBatteryLevelRequest': '0x408C',
+    'EventUsrVolumeOrientationNormal': '0x408D',
+    'EventUsrVolumeOrientationInvert': '0x408E',
+    'EventUsrResetPairedDeviceList': '0x408F',
+    'EventUsrEnterDutMode': '0x4090',
+    'EventUsr3DEnhancementEnableDisableToggle': '0x4091',
+    'EventUsrRCVolumeUp': '0x4092',
+    'EventUsrRCVolumeDown': '0x4093',
+    'EventUsrEnterBootMode2': '0x4094',
+    'EventUsrChargerConnected': '0x4095',
+    'EventUsrChargerDisconnected': '0x4096',
+    'EventUsrSubwooferDisconnect': '0x4097',
+    'EventUsrAnalogAudioConnected': '0x4098',
+    'EventUsrAnalogAudioDisconnected': '0x4099',
+    'EventUsrGaiaDFURequest': '0x409A',
+    'EventUsrStartIRLearningMode': '0x409B',
+    'EventUsrStopIRLearningMode': '0x409C',
+    'EventUsrClearIRCodes': '0x409D',
+    'EventUsrUserEqOn': '0x409E',
+    'EventUsrUserEqOff': '0x409F',
+    'EventUsrUserEqOnOffToggle': '0x40A0',
+    'EventUsrSpdifAudioConnected': '0x40AD',
+    'EventUsrSpdifAudioDisconnected': '0x40AE',
+    'EventUsrSelectAudioSourceSpdif': '0x40AF',
+    'EventUsrChangeAudioRouting': '0x40B0',
+    'EventUsrMasterDeviceTrimVolumeUp': '0x40B1',
+    'EventUsrMasterDeviceTrimVolumeDown': '0x40B2',
+    'EventUsrSlaveDeviceTrimVolumeUp': '0x40B3',
+    'EventUsrSlaveDeviceTrimVolumeDown': '0x40B4',
+    'EventUsrEstablishPeerConnection': '0x40B5',
+    'EventUsrTwsQualificationEnablePeerOpen': '0x40B7',
+    'EventUsrBleStartBonding': '0x40D0',
+    'EventUsrBleSwitchPeripheral': '0x40D1',
+    'EventUsrBleSwitchCentral': '0x40D2',
+    'EventUsrImmAlertStop': '0x40D3',
+    'EventUsrLlsAlertStop': '0x40D4',
+    'EventUsrFindMyRemoteImmAlertMild': '0x40D5',
+    'EventUsrFindMyRemoteImmAlertHigh': '0x40D6',
+    'EventUsrFindMyPhoneImmAlertMild': '0x40D7',
+    'EventUsrFindMyPhoneImmAlertHigh': '0x40D8',
+    'EventUsrFindMyPhoneRemoteImmAlertHigh': '0x40D9',
+    'EventUsrPartyModeOn': '0x40DA',
+    'EventUsrPartyModeOff': '0x40DB',
+    'EventUsrBleDeleteDevice': '0x40EC',
+    'EventUsrAvrcpNextGroupRelease': '0x40ED',
+    'EventUsrAvrcpPreviousGroupRelease': '0x40EE',
+    'EventUsrTwsQualificationVolUp': '0x40EF',
+    'EventUsrTwsQualificationVolDown': '0x40F0',
+    'EventUsrStartA2DPStream': '0x40F1',
+    'EventUsrPbapBrowseListByName': '0x40F2',
+    'EventUsrTwsQualificationSetAbsVolume': '0x40F3',
+    'EventUsrTwsQualificationPlayTrack': '0x40F4',
+    'EventUsrBleHidExtraConfig': '0x40F5',
+    'EventUsrTwsQualificationAVRCPConfigureDataSize': '0x40F6',
+}
diff --git a/acts/framework/acts/controllers/buds_lib/dev_utils/proto/Makefile b/acts/framework/acts/controllers/buds_lib/dev_utils/proto/Makefile
new file mode 100644
index 0000000..8509cd6
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/dev_utils/proto/Makefile
@@ -0,0 +1,4 @@
+all: nanopb_pb2.py plugin_pb2.py
+
+%_pb2.py: %.proto
+	protoc -I. --python_out=. $<
diff --git a/acts/framework/acts/controllers/buds_lib/dev_utils/proto/apollo_qa.proto b/acts/framework/acts/controllers/buds_lib/dev_utils/proto/apollo_qa.proto
new file mode 100644
index 0000000..0db32e9
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/dev_utils/proto/apollo_qa.proto
@@ -0,0 +1,175 @@
+/* Forward & backward compatibility practices must be followed.  This means:
+   a) Never re-use an enum if the semantics are different.
+   b) Never alter the semantic meaning.  If needed, simply deprecate
+      old message/field/enum & start using new one.  If deprecated message
+      is no longer used in code make sure to communicate this to QA.
+   c) Prefer 3-stage migrations if possible: add new message/enum/field & start
+      sending it, make sure QA has switched to new message/enum/field, remove old
+      message/enum/field.
+   Fields should start out required unless they are optional in the code.  They should
+   become optional when deprecated (make sure to update QA automation first to expect the
+   field to be optional) or removed.  Make sure to never re-use field ids unless
+   the field is being resurrected.
+ */
+syntax = "proto2";
+//package audiowear;
+package apollo.lib.apollo_dev_util_lib.proto;
+import "nanopb.proto";
+//import "third_party/nanopb/nanopb.proto";
+
+
+option java_package = "com.google.android.bisto.nano";
+option java_outer_classname = "ApolloQA";
+
+enum ApolloQAMessageType {
+  TRACE = 1;
+  GET_VER_RESPONSE = 2;
+  GET_CODEC_RESPONSE = 3;
+  GET_DSP_STATUS_RESPONSE = 4;
+  FACTORY_PLAY_SOUND = 5;
+  FACTORY_INFO_REQUEST = 6;
+  FACTORY_INFO_RESPONSE = 7;
+}
+enum TraceId {
+  // data[0] = partition# erased.
+  OTA_ERASE_PARTITION = 1;
+  // data[0] = partition# we will write to.
+  // data[1] = expected length we will write to partition
+  OTA_START_PARTITION_WRITE = 2;
+  // data[0] = partition# written
+  // data[1] = actual written length
+  OTA_FINISHED_PARTITION_WRITE = 3;
+  // our custom signature validation has begun
+  OTA_SIGNATURE_START = 4;
+  // our custom signature validation rejected the image
+  OTA_SIGNATURE_FAILURE = 5;
+  // triggering CSR to reboot & apply DFU
+  OTA_TRIGGERING_LOADER = 6;
+  // the CSR loader rejected the image
+  OTA_LOADER_VERIFY_FAILED = 7;
+  // progress update.
+  // data[0] = num bytes received
+  // data[1] = num bytes expected
+  OTA_PROGRESS = 8;
+  OTA_ABORTED = 9;
+  // up to 10: reserved for OTA
+  // data[0] = AvrcpPlayStatus as integer
+  AVRCP_PLAY_STATUS_CHANGE = 10;
+  /* data[0] = new volume level (int16 stored in uint32)
+     data[1] = new volume level in dB (int16 stored in uint32)
+     data[2] = previous volume level (int16 stored in uint32)
+     easy conversion in python:
+       new_volume = ctypes.c_short(ctypes.c_uint(data[0]).value).value
+       new_volume_db = ctypes.c_short(ctypes.c_uint(data[1]).value).value
+  */
+  VOLUME_CHANGE = 11;
+  /* data[0] = entry number of command */
+  COMMANDER_RECV_COMMAND = 12;
+  COMMANDER_FINISH_COMMAND = 13;
+  /* currently no information about the rejected command */
+  COMMANDER_REJECT_COMMAND = 14;
+}
+/* Note: FWD_SEEK & REV_SEEK are bit-wise or'ed into the status.
+   Use SEEK_MASK first to read the seek or mask it out to get the other
+   states.  Do not & with individual states for comparison as aside from
+   seek the other states are not a bitwise-mask.
+*/
+enum AvrcpPlayStatus {
+  STOPPED = 0;
+  PLAYING = 1;
+  PAUSED = 2;
+  FWD_SEEK = 8;
+  REV_SEEK = 16;
+  ERROR = 5;
+  /* & this with the status to compare against FWD_SEEK/REV_SEEK.
+     & with the complement of this value to get the other states */
+  SEEK_MASK = 24;
+}
+/* These messages are internal trace-points for QA to do whitebox validation.
+   However, developers should feel free to remove trace-points if they
+   no longer make sense (but communicate to QA to prep this ahead-of-time). */
+message ApolloQATrace {
+  // all messages should have a timestamp field and it will get auto-populated
+  // (no need to set it at the call-site).
+  required uint32 timestamp = 1;
+  // this is the event that occurred internally
+  required TraceId id = 2;
+  // this is some optional data that depends on the traceid.
+  // if not documented then no fields will be written.
+  repeated uint32 data = 3 [packed = true, (nanopb).max_count = 5];
+}
+enum PreviousBootStatus {
+  OTA_SUCCESS = 1; /* previous boot OK */
+  OTA_ERROR = 2; /* previous OTA failed */
+}
+//Next ID: 10
+message ApolloQAGetVerResponse {
+  required uint32 timestamp = 1;
+  required uint32 csr_fw_version = 2; // not implemented for now
+  required bool csr_fw_debug_build = 3; // not implemented for now
+  required uint32 vm_build_number = 4;
+  required bool vm_debug_build = 5;
+  required uint32 psoc_version = 6;
+  // the build label sent to AGSA. This should just be an amalgam of the
+  // broken-down info above. Aside from the X.Y.Z prefix, do not parse this
+  // for anything, as it is free-form text.
+  required string build_label = 7 [(nanopb).max_size = 32];
+  // if not set then info wasn't available.
+  optional PreviousBootStatus last_ota_status = 8;
+  required uint32 charger_version = 9;
+  optional uint32 expected_psoc_version = 10;
+}
+enum ApolloQAA2dpCodec {
+  AAC = 1;
+  SBC = 2;
+}
+message ApolloQAGetCodecResponse {
+  required uint32 timestamp = 1;
+  // if not set then unknown codec (error).
+  optional ApolloQAA2dpCodec codec = 2;
+}
+enum SinkState {
+  LIMBO = 0;
+  CONNECTABLE = 1;
+  DISCOVERABLE = 2;
+  CONNECTED = 3;
+  OUTGOING_CALLS_ESTABLISH = 4;
+  INCOMING_CALLS_ESTABLISH = 5;
+  ACTIVE_CALL_SCO = 6;
+  TEST_MODE = 7;
+  THREE_WAY_CALL_WAITING = 8;
+  THREE_WAY_CALL_ON_HOLD = 9;
+  THREE_WAY_MULTICALL = 10;
+  INCOMING_CALL_ON_HOLD = 11;
+  ACTIVE_CALL_NO_SCO = 12;
+  A2DP_STREAMING = 13;
+  DEVICE_LOW_BATTERY = 14;
+}
+message ApolloQAGetDspStatusResponse {
+  required uint32 timestamp = 1;
+  required bool is_dsp_loaded = 2;
+  required SinkState sink_state = 3;
+  /**
+   * This is a bit-wise mask.
+   * see AudioWearFeatureBits audiowear_plugin_state.h
+   * for the definition.
+   */
+  required uint32 features_active = 4;
+}
+message ApolloQAFactoryPlaySound {
+  enum PromptType {
+    PROMPT_TYPE_BT_CONNECTED = 1;
+    PROMPT_TYPE_IN_EAR = 2;
+  }
+  required PromptType prompt = 1;
+}
+message ApolloQAFactoryInfoRequest {
+}
+message ApolloQAFactoryInfoResponse {
+  required uint32 timestamp = 1;
+  optional int32 crystal_trim = 2 [(nanopb).int_size = IS_16];
+  optional bool crash_dump_exists = 3;
+  optional bool is_developer_mode_enabled = 4;
+  optional bool is_always_connected = 5;
+  optional uint32 hwid = 6;
+}
\ No newline at end of file
diff --git a/acts/framework/acts/controllers/buds_lib/dev_utils/proto/audiowear.proto b/acts/framework/acts/controllers/buds_lib/dev_utils/proto/audiowear.proto
new file mode 100644
index 0000000..8f825bd
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/dev_utils/proto/audiowear.proto
@@ -0,0 +1,25 @@
+syntax = "proto2";
+
+//package audiowear;
+package apollo.lib.apollo_dev_util_lib.proto;
+
+option java_package = "com.google.android.bisto.nano";
+option java_outer_classname = "Protocol";
+
+enum MessageGroup {
+    UNKNOWN_MESSAGE_GROUP = 0;
+    DEVICE_INPUT = 1;
+    OTA = 2;
+    DEVICE_CAPABILITY = 3;
+    DEVICE_STATUS = 4;
+    LOGGING = 5;
+    SENSORS = 6;
+    COMPANION_STATUS = 7;
+    DEVICE_COMMAND = 8;
+    BISTO_SETTINGS = 9;
+    WELLNESS = 10;
+    TEST = 11;
+    BLE_SERVICE = 12;
+    APOLLO_QA = 126;
+    TRANSLATE = 127;
+}
\ No newline at end of file
diff --git a/acts/framework/acts/controllers/buds_lib/dev_utils/proto/gen/apollo_qa_pb2.py b/acts/framework/acts/controllers/buds_lib/dev_utils/proto/gen/apollo_qa_pb2.py
new file mode 100644
index 0000000..8bfbda9
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/dev_utils/proto/gen/apollo_qa_pb2.py
@@ -0,0 +1,807 @@
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: apollo_qa.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+import acts.controllers.buds_lib.dev_utils.proto.gen.nanopb_pb2 as nanopb__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='apollo_qa.proto',
+  package='apollo.lib.apollo_dev_util_lib.proto',
+  syntax='proto2',
+  serialized_pb=_b('\n\x0f\x61pollo_qa.proto\x12$apollo.lib.apollo_dev_util_lib.proto\x1a\x0cnanopb.proto\"t\n\rApolloQATrace\x12\x11\n\ttimestamp\x18\x01 \x02(\r\x12\x39\n\x02id\x18\x02 \x02(\x0e\x32-.apollo.lib.apollo_dev_util_lib.proto.TraceId\x12\x15\n\x04\x64\x61ta\x18\x03 \x03(\rB\x07\x10\x01\x92?\x02\x10\x05\"\xcd\x02\n\x16\x41polloQAGetVerResponse\x12\x11\n\ttimestamp\x18\x01 \x02(\r\x12\x16\n\x0e\x63sr_fw_version\x18\x02 \x02(\r\x12\x1a\n\x12\x63sr_fw_debug_build\x18\x03 \x02(\x08\x12\x17\n\x0fvm_build_number\x18\x04 \x02(\r\x12\x16\n\x0evm_debug_build\x18\x05 \x02(\x08\x12\x14\n\x0cpsoc_version\x18\x06 \x02(\r\x12\x1a\n\x0b\x62uild_label\x18\x07 \x02(\tB\x05\x92?\x02\x08 \x12Q\n\x0flast_ota_status\x18\x08 \x01(\x0e\x32\x38.apollo.lib.apollo_dev_util_lib.proto.PreviousBootStatus\x12\x17\n\x0f\x63harger_version\x18\t \x02(\r\x12\x1d\n\x15\x65xpected_psoc_version\x18\n \x01(\r\"u\n\x18\x41polloQAGetCodecResponse\x12\x11\n\ttimestamp\x18\x01 \x02(\r\x12\x46\n\x05\x63odec\x18\x02 \x01(\x0e\x32\x37.apollo.lib.apollo_dev_util_lib.proto.ApolloQAA2dpCodec\"\xa6\x01\n\x1c\x41polloQAGetDspStatusResponse\x12\x11\n\ttimestamp\x18\x01 \x02(\r\x12\x15\n\ris_dsp_loaded\x18\x02 \x02(\x08\x12\x43\n\nsink_state\x18\x03 \x02(\x0e\x32/.apollo.lib.apollo_dev_util_lib.proto.SinkState\x12\x17\n\x0f\x66\x65\x61tures_active\x18\x04 \x02(\r\"\xb9\x01\n\x18\x41polloQAFactoryPlaySound\x12Y\n\x06prompt\x18\x01 \x02(\x0e\x32I.apollo.lib.apollo_dev_util_lib.proto.ApolloQAFactoryPlaySound.PromptType\"B\n\nPromptType\x12\x1c\n\x18PROMPT_TYPE_BT_CONNECTED\x10\x01\x12\x16\n\x12PROMPT_TYPE_IN_EAR\x10\x02\"\x1c\n\x1a\x41polloQAFactoryInfoRequest\"\xb6\x01\n\x1b\x41polloQAFactoryInfoResponse\x12\x11\n\ttimestamp\x18\x01 \x02(\r\x12\x1b\n\x0c\x63rystal_trim\x18\x02 \x01(\x05\x42\x05\x92?\x02\x38\x10\x12\x19\n\x11\x63rash_dump_exists\x18\x03 \x01(\x08\x12!\n\x19is_developer_mode_enabled\x18\x04 \x01(\x08\x12\x1b\n\x13is_always_connected\x18\x05 \x01(\x08\x12\x0c\n\x04hwid\x18\x06 
\x01(\r*\xb8\x01\n\x13\x41polloQAMessageType\x12\t\n\x05TRACE\x10\x01\x12\x14\n\x10GET_VER_RESPONSE\x10\x02\x12\x16\n\x12GET_CODEC_RESPONSE\x10\x03\x12\x1b\n\x17GET_DSP_STATUS_RESPONSE\x10\x04\x12\x16\n\x12\x46\x41\x43TORY_PLAY_SOUND\x10\x05\x12\x18\n\x14\x46\x41\x43TORY_INFO_REQUEST\x10\x06\x12\x19\n\x15\x46\x41\x43TORY_INFO_RESPONSE\x10\x07*\xfc\x02\n\x07TraceId\x12\x17\n\x13OTA_ERASE_PARTITION\x10\x01\x12\x1d\n\x19OTA_START_PARTITION_WRITE\x10\x02\x12 \n\x1cOTA_FINISHED_PARTITION_WRITE\x10\x03\x12\x17\n\x13OTA_SIGNATURE_START\x10\x04\x12\x19\n\x15OTA_SIGNATURE_FAILURE\x10\x05\x12\x19\n\x15OTA_TRIGGERING_LOADER\x10\x06\x12\x1c\n\x18OTA_LOADER_VERIFY_FAILED\x10\x07\x12\x10\n\x0cOTA_PROGRESS\x10\x08\x12\x0f\n\x0bOTA_ABORTED\x10\t\x12\x1c\n\x18\x41VRCP_PLAY_STATUS_CHANGE\x10\n\x12\x11\n\rVOLUME_CHANGE\x10\x0b\x12\x1a\n\x16\x43OMMANDER_RECV_COMMAND\x10\x0c\x12\x1c\n\x18\x43OMMANDER_FINISH_COMMAND\x10\r\x12\x1c\n\x18\x43OMMANDER_REJECT_COMMAND\x10\x0e*m\n\x0f\x41vrcpPlayStatus\x12\x0b\n\x07STOPPED\x10\x00\x12\x0b\n\x07PLAYING\x10\x01\x12\n\n\x06PAUSED\x10\x02\x12\x0c\n\x08\x46WD_SEEK\x10\x08\x12\x0c\n\x08REV_SEEK\x10\x10\x12\t\n\x05\x45RROR\x10\x05\x12\r\n\tSEEK_MASK\x10\x18*4\n\x12PreviousBootStatus\x12\x0f\n\x0bOTA_SUCCESS\x10\x01\x12\r\n\tOTA_ERROR\x10\x02*%\n\x11\x41polloQAA2dpCodec\x12\x07\n\x03\x41\x41\x43\x10\x01\x12\x07\n\x03SBC\x10\x02*\xd8\x02\n\tSinkState\x12\t\n\x05LIMBO\x10\x00\x12\x0f\n\x0b\x43ONNECTABLE\x10\x01\x12\x10\n\x0c\x44ISCOVERABLE\x10\x02\x12\r\n\tCONNECTED\x10\x03\x12\x1c\n\x18OUTGOING_CALLS_ESTABLISH\x10\x04\x12\x1c\n\x18INCOMING_CALLS_ESTABLISH\x10\x05\x12\x13\n\x0f\x41\x43TIVE_CALL_SCO\x10\x06\x12\r\n\tTEST_MODE\x10\x07\x12\x1a\n\x16THREE_WAY_CALL_WAITING\x10\x08\x12\x1a\n\x16THREE_WAY_CALL_ON_HOLD\x10\t\x12\x17\n\x13THREE_WAY_MULTICALL\x10\n\x12\x19\n\x15INCOMING_CALL_ON_HOLD\x10\x0b\x12\x16\n\x12\x41\x43TIVE_CALL_NO_SCO\x10\x0c\x12\x12\n\x0e\x41\x32\x44P_STREAMING\x10\r\x12\x16\n\x12\x44\x45VICE_LOW_BATTERY\x10\x0e\x42)\n\x1d\x63om.google.
android.bisto.nanoB\x08\x41polloQA')
+  ,
+  dependencies=[nanopb__pb2.DESCRIPTOR,])
+
+_APOLLOQAMESSAGETYPE = _descriptor.EnumDescriptor(
+  name='ApolloQAMessageType',
+  full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAMessageType',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='TRACE', index=0, number=1,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='GET_VER_RESPONSE', index=1, number=2,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='GET_CODEC_RESPONSE', index=2, number=3,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='GET_DSP_STATUS_RESPONSE', index=3, number=4,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='FACTORY_PLAY_SOUND', index=4, number=5,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='FACTORY_INFO_REQUEST', index=5, number=6,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='FACTORY_INFO_RESPONSE', index=6, number=7,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=1217,
+  serialized_end=1401,
+)
+_sym_db.RegisterEnumDescriptor(_APOLLOQAMESSAGETYPE)
+
+ApolloQAMessageType = enum_type_wrapper.EnumTypeWrapper(_APOLLOQAMESSAGETYPE)
+_TRACEID = _descriptor.EnumDescriptor(
+  name='TraceId',
+  full_name='apollo.lib.apollo_dev_util_lib.proto.TraceId',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='OTA_ERASE_PARTITION', index=0, number=1,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='OTA_START_PARTITION_WRITE', index=1, number=2,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='OTA_FINISHED_PARTITION_WRITE', index=2, number=3,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='OTA_SIGNATURE_START', index=3, number=4,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='OTA_SIGNATURE_FAILURE', index=4, number=5,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='OTA_TRIGGERING_LOADER', index=5, number=6,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='OTA_LOADER_VERIFY_FAILED', index=6, number=7,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='OTA_PROGRESS', index=7, number=8,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='OTA_ABORTED', index=8, number=9,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='AVRCP_PLAY_STATUS_CHANGE', index=9, number=10,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='VOLUME_CHANGE', index=10, number=11,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='COMMANDER_RECV_COMMAND', index=11, number=12,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='COMMANDER_FINISH_COMMAND', index=12, number=13,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='COMMANDER_REJECT_COMMAND', index=13, number=14,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=1404,
+  serialized_end=1784,
+)
+_sym_db.RegisterEnumDescriptor(_TRACEID)
+
+TraceId = enum_type_wrapper.EnumTypeWrapper(_TRACEID)
+_AVRCPPLAYSTATUS = _descriptor.EnumDescriptor(
+  name='AvrcpPlayStatus',
+  full_name='apollo.lib.apollo_dev_util_lib.proto.AvrcpPlayStatus',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='STOPPED', index=0, number=0,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='PLAYING', index=1, number=1,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='PAUSED', index=2, number=2,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='FWD_SEEK', index=3, number=8,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='REV_SEEK', index=4, number=16,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='ERROR', index=5, number=5,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SEEK_MASK', index=6, number=24,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=1786,
+  serialized_end=1895,
+)
+_sym_db.RegisterEnumDescriptor(_AVRCPPLAYSTATUS)
+
+AvrcpPlayStatus = enum_type_wrapper.EnumTypeWrapper(_AVRCPPLAYSTATUS)
+_PREVIOUSBOOTSTATUS = _descriptor.EnumDescriptor(
+  name='PreviousBootStatus',
+  full_name='apollo.lib.apollo_dev_util_lib.proto.PreviousBootStatus',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='OTA_SUCCESS', index=0, number=1,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='OTA_ERROR', index=1, number=2,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=1897,
+  serialized_end=1949,
+)
+_sym_db.RegisterEnumDescriptor(_PREVIOUSBOOTSTATUS)
+
+PreviousBootStatus = enum_type_wrapper.EnumTypeWrapper(_PREVIOUSBOOTSTATUS)
+_APOLLOQAA2DPCODEC = _descriptor.EnumDescriptor(
+  name='ApolloQAA2dpCodec',
+  full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAA2dpCodec',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='AAC', index=0, number=1,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SBC', index=1, number=2,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=1951,
+  serialized_end=1988,
+)
+_sym_db.RegisterEnumDescriptor(_APOLLOQAA2DPCODEC)
+
+ApolloQAA2dpCodec = enum_type_wrapper.EnumTypeWrapper(_APOLLOQAA2DPCODEC)
+_SINKSTATE = _descriptor.EnumDescriptor(
+  name='SinkState',
+  full_name='apollo.lib.apollo_dev_util_lib.proto.SinkState',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='LIMBO', index=0, number=0,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='CONNECTABLE', index=1, number=1,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='DISCOVERABLE', index=2, number=2,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='CONNECTED', index=3, number=3,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='OUTGOING_CALLS_ESTABLISH', index=4, number=4,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='INCOMING_CALLS_ESTABLISH', index=5, number=5,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='ACTIVE_CALL_SCO', index=6, number=6,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TEST_MODE', index=7, number=7,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='THREE_WAY_CALL_WAITING', index=8, number=8,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='THREE_WAY_CALL_ON_HOLD', index=9, number=9,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='THREE_WAY_MULTICALL', index=10, number=10,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='INCOMING_CALL_ON_HOLD', index=11, number=11,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='ACTIVE_CALL_NO_SCO', index=12, number=12,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='A2DP_STREAMING', index=13, number=13,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='DEVICE_LOW_BATTERY', index=14, number=14,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=1991,
+  serialized_end=2335,
+)
+_sym_db.RegisterEnumDescriptor(_SINKSTATE)
+
+SinkState = enum_type_wrapper.EnumTypeWrapper(_SINKSTATE)
+TRACE = 1
+GET_VER_RESPONSE = 2
+GET_CODEC_RESPONSE = 3
+GET_DSP_STATUS_RESPONSE = 4
+FACTORY_PLAY_SOUND = 5
+FACTORY_INFO_REQUEST = 6
+FACTORY_INFO_RESPONSE = 7
+OTA_ERASE_PARTITION = 1
+OTA_START_PARTITION_WRITE = 2
+OTA_FINISHED_PARTITION_WRITE = 3
+OTA_SIGNATURE_START = 4
+OTA_SIGNATURE_FAILURE = 5
+OTA_TRIGGERING_LOADER = 6
+OTA_LOADER_VERIFY_FAILED = 7
+OTA_PROGRESS = 8
+OTA_ABORTED = 9
+AVRCP_PLAY_STATUS_CHANGE = 10
+VOLUME_CHANGE = 11
+COMMANDER_RECV_COMMAND = 12
+COMMANDER_FINISH_COMMAND = 13
+COMMANDER_REJECT_COMMAND = 14
+STOPPED = 0
+PLAYING = 1
+PAUSED = 2
+FWD_SEEK = 8
+REV_SEEK = 16
+ERROR = 5
+SEEK_MASK = 24
+OTA_SUCCESS = 1
+OTA_ERROR = 2
+AAC = 1
+SBC = 2
+LIMBO = 0
+CONNECTABLE = 1
+DISCOVERABLE = 2
+CONNECTED = 3
+OUTGOING_CALLS_ESTABLISH = 4
+INCOMING_CALLS_ESTABLISH = 5
+ACTIVE_CALL_SCO = 6
+TEST_MODE = 7
+THREE_WAY_CALL_WAITING = 8
+THREE_WAY_CALL_ON_HOLD = 9
+THREE_WAY_MULTICALL = 10
+INCOMING_CALL_ON_HOLD = 11
+ACTIVE_CALL_NO_SCO = 12
+A2DP_STREAMING = 13
+DEVICE_LOW_BATTERY = 14
+
+
+_APOLLOQAFACTORYPLAYSOUND_PROMPTTYPE = _descriptor.EnumDescriptor(
+  name='PromptType',
+  full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAFactoryPlaySound.PromptType',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='PROMPT_TYPE_BT_CONNECTED', index=0, number=1,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='PROMPT_TYPE_IN_EAR', index=1, number=2,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=933,
+  serialized_end=999,
+)
+_sym_db.RegisterEnumDescriptor(_APOLLOQAFACTORYPLAYSOUND_PROMPTTYPE)
+
+
+_APOLLOQATRACE = _descriptor.Descriptor(
+  name='ApolloQATrace',
+  full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQATrace',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='timestamp', full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQATrace.timestamp', index=0,
+      number=1, type=13, cpp_type=3, label=2,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='id', full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQATrace.id', index=1,
+      number=2, type=14, cpp_type=8, label=2,
+      has_default_value=False, default_value=1,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='data', full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQATrace.data', index=2,
+      number=3, type=13, cpp_type=3, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001\222?\002\020\005'))),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto2',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=71,
+  serialized_end=187,
+)
+
+
+_APOLLOQAGETVERRESPONSE = _descriptor.Descriptor(
+  name='ApolloQAGetVerResponse',
+  full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAGetVerResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='timestamp', full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAGetVerResponse.timestamp', index=0,
+      number=1, type=13, cpp_type=3, label=2,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='csr_fw_version', full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAGetVerResponse.csr_fw_version', index=1,
+      number=2, type=13, cpp_type=3, label=2,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='csr_fw_debug_build', full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAGetVerResponse.csr_fw_debug_build', index=2,
+      number=3, type=8, cpp_type=7, label=2,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='vm_build_number', full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAGetVerResponse.vm_build_number', index=3,
+      number=4, type=13, cpp_type=3, label=2,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='vm_debug_build', full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAGetVerResponse.vm_debug_build', index=4,
+      number=5, type=8, cpp_type=7, label=2,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='psoc_version', full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAGetVerResponse.psoc_version', index=5,
+      number=6, type=13, cpp_type=3, label=2,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='build_label', full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAGetVerResponse.build_label', index=6,
+      number=7, type=9, cpp_type=9, label=2,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\222?\002\010 '))),
+    _descriptor.FieldDescriptor(
+      name='last_ota_status', full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAGetVerResponse.last_ota_status', index=7,
+      number=8, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=1,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='charger_version', full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAGetVerResponse.charger_version', index=8,
+      number=9, type=13, cpp_type=3, label=2,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='expected_psoc_version', full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAGetVerResponse.expected_psoc_version', index=9,
+      number=10, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto2',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=190,
+  serialized_end=523,
+)
+
+
+_APOLLOQAGETCODECRESPONSE = _descriptor.Descriptor(
+  name='ApolloQAGetCodecResponse',
+  full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAGetCodecResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='timestamp', full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAGetCodecResponse.timestamp', index=0,
+      number=1, type=13, cpp_type=3, label=2,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='codec', full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAGetCodecResponse.codec', index=1,
+      number=2, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=1,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto2',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=525,
+  serialized_end=642,
+)
+
+
+_APOLLOQAGETDSPSTATUSRESPONSE = _descriptor.Descriptor(
+  name='ApolloQAGetDspStatusResponse',
+  full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAGetDspStatusResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='timestamp', full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAGetDspStatusResponse.timestamp', index=0,
+      number=1, type=13, cpp_type=3, label=2,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='is_dsp_loaded', full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAGetDspStatusResponse.is_dsp_loaded', index=1,
+      number=2, type=8, cpp_type=7, label=2,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='sink_state', full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAGetDspStatusResponse.sink_state', index=2,
+      number=3, type=14, cpp_type=8, label=2,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='features_active', full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAGetDspStatusResponse.features_active', index=3,
+      number=4, type=13, cpp_type=3, label=2,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto2',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=645,
+  serialized_end=811,
+)
+
+
+_APOLLOQAFACTORYPLAYSOUND = _descriptor.Descriptor(
+  name='ApolloQAFactoryPlaySound',
+  full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAFactoryPlaySound',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='prompt', full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAFactoryPlaySound.prompt', index=0,
+      number=1, type=14, cpp_type=8, label=2,
+      has_default_value=False, default_value=1,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _APOLLOQAFACTORYPLAYSOUND_PROMPTTYPE,
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto2',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=814,
+  serialized_end=999,
+)
+
+
+_APOLLOQAFACTORYINFOREQUEST = _descriptor.Descriptor(
+  name='ApolloQAFactoryInfoRequest',
+  full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAFactoryInfoRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto2',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1001,
+  serialized_end=1029,
+)
+
+
+_APOLLOQAFACTORYINFORESPONSE = _descriptor.Descriptor(
+  name='ApolloQAFactoryInfoResponse',
+  full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAFactoryInfoResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='timestamp', full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAFactoryInfoResponse.timestamp', index=0,
+      number=1, type=13, cpp_type=3, label=2,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='crystal_trim', full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAFactoryInfoResponse.crystal_trim', index=1,
+      number=2, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\222?\0028\020'))),
+    _descriptor.FieldDescriptor(
+      name='crash_dump_exists', full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAFactoryInfoResponse.crash_dump_exists', index=2,
+      number=3, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='is_developer_mode_enabled', full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAFactoryInfoResponse.is_developer_mode_enabled', index=3,
+      number=4, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='is_always_connected', full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAFactoryInfoResponse.is_always_connected', index=4,
+      number=5, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='hwid', full_name='apollo.lib.apollo_dev_util_lib.proto.ApolloQAFactoryInfoResponse.hwid', index=5,
+      number=6, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto2',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1032,
+  serialized_end=1214,
+)
+
+_APOLLOQATRACE.fields_by_name['id'].enum_type = _TRACEID
+_APOLLOQAGETVERRESPONSE.fields_by_name['last_ota_status'].enum_type = _PREVIOUSBOOTSTATUS
+_APOLLOQAGETCODECRESPONSE.fields_by_name['codec'].enum_type = _APOLLOQAA2DPCODEC
+_APOLLOQAGETDSPSTATUSRESPONSE.fields_by_name['sink_state'].enum_type = _SINKSTATE
+_APOLLOQAFACTORYPLAYSOUND.fields_by_name['prompt'].enum_type = _APOLLOQAFACTORYPLAYSOUND_PROMPTTYPE
+_APOLLOQAFACTORYPLAYSOUND_PROMPTTYPE.containing_type = _APOLLOQAFACTORYPLAYSOUND
+DESCRIPTOR.message_types_by_name['ApolloQATrace'] = _APOLLOQATRACE
+DESCRIPTOR.message_types_by_name['ApolloQAGetVerResponse'] = _APOLLOQAGETVERRESPONSE
+DESCRIPTOR.message_types_by_name['ApolloQAGetCodecResponse'] = _APOLLOQAGETCODECRESPONSE
+DESCRIPTOR.message_types_by_name['ApolloQAGetDspStatusResponse'] = _APOLLOQAGETDSPSTATUSRESPONSE
+DESCRIPTOR.message_types_by_name['ApolloQAFactoryPlaySound'] = _APOLLOQAFACTORYPLAYSOUND
+DESCRIPTOR.message_types_by_name['ApolloQAFactoryInfoRequest'] = _APOLLOQAFACTORYINFOREQUEST
+DESCRIPTOR.message_types_by_name['ApolloQAFactoryInfoResponse'] = _APOLLOQAFACTORYINFORESPONSE
+DESCRIPTOR.enum_types_by_name['ApolloQAMessageType'] = _APOLLOQAMESSAGETYPE
+DESCRIPTOR.enum_types_by_name['TraceId'] = _TRACEID
+DESCRIPTOR.enum_types_by_name['AvrcpPlayStatus'] = _AVRCPPLAYSTATUS
+DESCRIPTOR.enum_types_by_name['PreviousBootStatus'] = _PREVIOUSBOOTSTATUS
+DESCRIPTOR.enum_types_by_name['ApolloQAA2dpCodec'] = _APOLLOQAA2DPCODEC
+DESCRIPTOR.enum_types_by_name['SinkState'] = _SINKSTATE
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+ApolloQATrace = _reflection.GeneratedProtocolMessageType('ApolloQATrace', (_message.Message,), dict(
+  DESCRIPTOR = _APOLLOQATRACE,
+  __module__ = 'apollo_qa_pb2'
+  # @@protoc_insertion_point(class_scope:apollo.lib.apollo_dev_util_lib.proto.ApolloQATrace)
+  ))
+_sym_db.RegisterMessage(ApolloQATrace)
+
+ApolloQAGetVerResponse = _reflection.GeneratedProtocolMessageType('ApolloQAGetVerResponse', (_message.Message,), dict(
+  DESCRIPTOR = _APOLLOQAGETVERRESPONSE,
+  __module__ = 'apollo_qa_pb2'
+  # @@protoc_insertion_point(class_scope:apollo.lib.apollo_dev_util_lib.proto.ApolloQAGetVerResponse)
+  ))
+_sym_db.RegisterMessage(ApolloQAGetVerResponse)
+
+ApolloQAGetCodecResponse = _reflection.GeneratedProtocolMessageType('ApolloQAGetCodecResponse', (_message.Message,), dict(
+  DESCRIPTOR = _APOLLOQAGETCODECRESPONSE,
+  __module__ = 'apollo_qa_pb2'
+  # @@protoc_insertion_point(class_scope:apollo.lib.apollo_dev_util_lib.proto.ApolloQAGetCodecResponse)
+  ))
+_sym_db.RegisterMessage(ApolloQAGetCodecResponse)
+
+ApolloQAGetDspStatusResponse = _reflection.GeneratedProtocolMessageType('ApolloQAGetDspStatusResponse', (_message.Message,), dict(
+  DESCRIPTOR = _APOLLOQAGETDSPSTATUSRESPONSE,
+  __module__ = 'apollo_qa_pb2'
+  # @@protoc_insertion_point(class_scope:apollo.lib.apollo_dev_util_lib.proto.ApolloQAGetDspStatusResponse)
+  ))
+_sym_db.RegisterMessage(ApolloQAGetDspStatusResponse)
+
+ApolloQAFactoryPlaySound = _reflection.GeneratedProtocolMessageType('ApolloQAFactoryPlaySound', (_message.Message,), dict(
+  DESCRIPTOR = _APOLLOQAFACTORYPLAYSOUND,
+  __module__ = 'apollo_qa_pb2'
+  # @@protoc_insertion_point(class_scope:apollo.lib.apollo_dev_util_lib.proto.ApolloQAFactoryPlaySound)
+  ))
+_sym_db.RegisterMessage(ApolloQAFactoryPlaySound)
+
+ApolloQAFactoryInfoRequest = _reflection.GeneratedProtocolMessageType('ApolloQAFactoryInfoRequest', (_message.Message,), dict(
+  DESCRIPTOR = _APOLLOQAFACTORYINFOREQUEST,
+  __module__ = 'apollo_qa_pb2'
+  # @@protoc_insertion_point(class_scope:apollo.lib.apollo_dev_util_lib.proto.ApolloQAFactoryInfoRequest)
+  ))
+_sym_db.RegisterMessage(ApolloQAFactoryInfoRequest)
+
+ApolloQAFactoryInfoResponse = _reflection.GeneratedProtocolMessageType('ApolloQAFactoryInfoResponse', (_message.Message,), dict(
+  DESCRIPTOR = _APOLLOQAFACTORYINFORESPONSE,
+  __module__ = 'apollo_qa_pb2'
+  # @@protoc_insertion_point(class_scope:apollo.lib.apollo_dev_util_lib.proto.ApolloQAFactoryInfoResponse)
+  ))
+_sym_db.RegisterMessage(ApolloQAFactoryInfoResponse)
+
+
+DESCRIPTOR.has_options = True
+DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\035com.google.android.bisto.nanoB\010ApolloQA'))
+_APOLLOQATRACE.fields_by_name['data'].has_options = True
+_APOLLOQATRACE.fields_by_name['data']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001\222?\002\020\005'))
+_APOLLOQAGETVERRESPONSE.fields_by_name['build_label'].has_options = True
+_APOLLOQAGETVERRESPONSE.fields_by_name['build_label']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\222?\002\010 '))
+_APOLLOQAFACTORYINFORESPONSE.fields_by_name['crystal_trim'].has_options = True
+_APOLLOQAFACTORYINFORESPONSE.fields_by_name['crystal_trim']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\222?\0028\020'))
+# @@protoc_insertion_point(module_scope)
diff --git a/acts/framework/acts/controllers/buds_lib/dev_utils/proto/gen/audiowear_pb2.py b/acts/framework/acts/controllers/buds_lib/dev_utils/proto/gen/audiowear_pb2.py
new file mode 100644
index 0000000..094a868
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/dev_utils/proto/gen/audiowear_pb2.py
@@ -0,0 +1,124 @@
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: audiowear.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='audiowear.proto',
+  package='apollo.lib.apollo_dev_util_lib.proto',
+  syntax='proto2',
+  serialized_pb=_b('\n\x0f\x61udiowear.proto\x12$apollo.lib.apollo_dev_util_lib.proto*\x8d\x02\n\x0cMessageGroup\x12\x19\n\x15UNKNOWN_MESSAGE_GROUP\x10\x00\x12\x10\n\x0c\x44\x45VICE_INPUT\x10\x01\x12\x07\n\x03OTA\x10\x02\x12\x15\n\x11\x44\x45VICE_CAPABILITY\x10\x03\x12\x11\n\rDEVICE_STATUS\x10\x04\x12\x0b\n\x07LOGGING\x10\x05\x12\x0b\n\x07SENSORS\x10\x06\x12\x14\n\x10\x43OMPANION_STATUS\x10\x07\x12\x12\n\x0e\x44\x45VICE_COMMAND\x10\x08\x12\x12\n\x0e\x42ISTO_SETTINGS\x10\t\x12\x0c\n\x08WELLNESS\x10\n\x12\x08\n\x04TEST\x10\x0b\x12\x0f\n\x0b\x42LE_SERVICE\x10\x0c\x12\r\n\tAPOLLO_QA\x10~\x12\r\n\tTRANSLATE\x10\x7f\x42)\n\x1d\x63om.google.android.bisto.nanoB\x08Protocol')
+)
+
+_MESSAGEGROUP = _descriptor.EnumDescriptor(
+  name='MessageGroup',
+  full_name='apollo.lib.apollo_dev_util_lib.proto.MessageGroup',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='UNKNOWN_MESSAGE_GROUP', index=0, number=0,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='DEVICE_INPUT', index=1, number=1,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='OTA', index=2, number=2,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='DEVICE_CAPABILITY', index=3, number=3,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='DEVICE_STATUS', index=4, number=4,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='LOGGING', index=5, number=5,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='SENSORS', index=6, number=6,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='COMPANION_STATUS', index=7, number=7,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='DEVICE_COMMAND', index=8, number=8,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='BISTO_SETTINGS', index=9, number=9,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='WELLNESS', index=10, number=10,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TEST', index=11, number=11,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='BLE_SERVICE', index=12, number=12,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='APOLLO_QA', index=13, number=126,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TRANSLATE', index=14, number=127,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=58,
+  serialized_end=327,
+)
+_sym_db.RegisterEnumDescriptor(_MESSAGEGROUP)
+
+MessageGroup = enum_type_wrapper.EnumTypeWrapper(_MESSAGEGROUP)
+UNKNOWN_MESSAGE_GROUP = 0
+DEVICE_INPUT = 1
+OTA = 2
+DEVICE_CAPABILITY = 3
+DEVICE_STATUS = 4
+LOGGING = 5
+SENSORS = 6
+COMPANION_STATUS = 7
+DEVICE_COMMAND = 8
+BISTO_SETTINGS = 9
+WELLNESS = 10
+TEST = 11
+BLE_SERVICE = 12
+APOLLO_QA = 126
+TRANSLATE = 127
+
+
+DESCRIPTOR.enum_types_by_name['MessageGroup'] = _MESSAGEGROUP
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+DESCRIPTOR.has_options = True
+DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\035com.google.android.bisto.nanoB\010Protocol'))
+# @@protoc_insertion_point(module_scope)
diff --git a/acts/framework/acts/controllers/buds_lib/dev_utils/proto/gen/google/protobuf/descriptor_pb2.py b/acts/framework/acts/controllers/buds_lib/dev_utils/proto/gen/google/protobuf/descriptor_pb2.py
new file mode 100644
index 0000000..e3b4558
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/dev_utils/proto/gen/google/protobuf/descriptor_pb2.py
@@ -0,0 +1,1572 @@
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: google/protobuf/descriptor.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='google/protobuf/descriptor.proto',
+  package='google.protobuf',
+  syntax='proto2',
+  serialized_pb=_b('\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"G\n\x11\x46ileDescriptorSet\x12\x32\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xdb\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12\x36\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12\x38\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProto\x12\x38\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12-\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x39\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfo\x12\x0e\n\x06syntax\x18\x0c \x01(\t\"\xe4\x03\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x38\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x35\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12H\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRange\x12\x39\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProto\x12\x30\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptions\x1a,\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"\xa9\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12:\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.Label\x12\x38\n\x04type\x18\x05 
\x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12.\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptions\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"$\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x8c\x01\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProto\x12-\n\x07options\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.EnumOptions\"l\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12\x32\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptions\"\x90\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProto\x12\x30\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptions\"\xc1\x01\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12/\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptions\x12\x1f\n\x10\x63lient_streaming\x18\x05 
\x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10server_streaming\x18\x06 \x01(\x08:\x05\x66\x61lse\"\xcc\x04\n\x0b\x46ileOptions\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12,\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08:\x05\x66\x61lse\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12\x46\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10\x63\x63_enable_arenas\x18\x1f \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xe6\x01\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x11\n\tmap_entry\x18\x07 \x01(\x08\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xa0\x02\n\x0c\x46ieldOptions\x12:\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 
\x03(\x0b\x32$.google.protobuf.UninterpretedOption\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x8d\x01\n\x0b\x45numOptions\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"}\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"{\n\x0eServiceOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"z\n\rMethodOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9e\x02\n\x13UninterpretedOption\x12;\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xb1\x01\n\x0eSourceCodeInfo\x12:\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.Location\x1a\x63\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 
\x01(\tB)\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01')
+)
+
+
+
+_FIELDDESCRIPTORPROTO_TYPE = _descriptor.EnumDescriptor(
+  name='Type',
+  full_name='google.protobuf.FieldDescriptorProto.Type',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_DOUBLE', index=0, number=1,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_FLOAT', index=1, number=2,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_INT64', index=2, number=3,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_UINT64', index=3, number=4,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_INT32', index=4, number=5,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_FIXED64', index=5, number=6,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_FIXED32', index=6, number=7,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_BOOL', index=7, number=8,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_STRING', index=8, number=9,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_GROUP', index=9, number=10,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_MESSAGE', index=10, number=11,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_BYTES', index=11, number=12,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_UINT32', index=12, number=13,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_ENUM', index=13, number=14,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_SFIXED32', index=14, number=15,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_SFIXED64', index=15, number=16,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_SINT32', index=16, number=17,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='TYPE_SINT64', index=17, number=18,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=1394,
+  serialized_end=1704,
+)
+_sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_TYPE)
+
+_FIELDDESCRIPTORPROTO_LABEL = _descriptor.EnumDescriptor(
+  name='Label',
+  full_name='google.protobuf.FieldDescriptorProto.Label',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='LABEL_OPTIONAL', index=0, number=1,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='LABEL_REQUIRED', index=1, number=2,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='LABEL_REPEATED', index=2, number=3,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=1706,
+  serialized_end=1773,
+)
+_sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_LABEL)
+
+_FILEOPTIONS_OPTIMIZEMODE = _descriptor.EnumDescriptor(
+  name='OptimizeMode',
+  full_name='google.protobuf.FileOptions.OptimizeMode',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='SPEED', index=0, number=1,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='CODE_SIZE', index=1, number=2,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='LITE_RUNTIME', index=2, number=3,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=2929,
+  serialized_end=2987,
+)
+_sym_db.RegisterEnumDescriptor(_FILEOPTIONS_OPTIMIZEMODE)
+
+_FIELDOPTIONS_CTYPE = _descriptor.EnumDescriptor(
+  name='CType',
+  full_name='google.protobuf.FieldOptions.CType',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='STRING', index=0, number=0,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='CORD', index=1, number=1,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='STRING_PIECE', index=2, number=2,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=3464,
+  serialized_end=3511,
+)
+_sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_CTYPE)
+
+
+_FILEDESCRIPTORSET = _descriptor.Descriptor(
+  name='FileDescriptorSet',
+  full_name='google.protobuf.FileDescriptorSet',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='file', full_name='google.protobuf.FileDescriptorSet.file', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto2',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=53,
+  serialized_end=124,
+)
+
+
+_FILEDESCRIPTORPROTO = _descriptor.Descriptor(
+  name='FileDescriptorProto',
+  full_name='google.protobuf.FileDescriptorProto',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='google.protobuf.FileDescriptorProto.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='package', full_name='google.protobuf.FileDescriptorProto.package', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='dependency', full_name='google.protobuf.FileDescriptorProto.dependency', index=2,
+      number=3, type=9, cpp_type=9, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='public_dependency', full_name='google.protobuf.FileDescriptorProto.public_dependency', index=3,
+      number=10, type=5, cpp_type=1, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='weak_dependency', full_name='google.protobuf.FileDescriptorProto.weak_dependency', index=4,
+      number=11, type=5, cpp_type=1, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='message_type', full_name='google.protobuf.FileDescriptorProto.message_type', index=5,
+      number=4, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='enum_type', full_name='google.protobuf.FileDescriptorProto.enum_type', index=6,
+      number=5, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='service', full_name='google.protobuf.FileDescriptorProto.service', index=7,
+      number=6, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='extension', full_name='google.protobuf.FileDescriptorProto.extension', index=8,
+      number=7, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='options', full_name='google.protobuf.FileDescriptorProto.options', index=9,
+      number=8, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='source_code_info', full_name='google.protobuf.FileDescriptorProto.source_code_info', index=10,
+      number=9, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='syntax', full_name='google.protobuf.FileDescriptorProto.syntax', index=11,
+      number=12, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto2',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=127,
+  serialized_end=602,
+)
+
+
+_DESCRIPTORPROTO_EXTENSIONRANGE = _descriptor.Descriptor(
+  name='ExtensionRange',
+  full_name='google.protobuf.DescriptorProto.ExtensionRange',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='start', full_name='google.protobuf.DescriptorProto.ExtensionRange.start', index=0,
+      number=1, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='end', full_name='google.protobuf.DescriptorProto.ExtensionRange.end', index=1,
+      number=2, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto2',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1045,
+  serialized_end=1089,
+)
+
+_DESCRIPTORPROTO = _descriptor.Descriptor(
+  name='DescriptorProto',
+  full_name='google.protobuf.DescriptorProto',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='google.protobuf.DescriptorProto.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='field', full_name='google.protobuf.DescriptorProto.field', index=1,
+      number=2, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='extension', full_name='google.protobuf.DescriptorProto.extension', index=2,
+      number=6, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='nested_type', full_name='google.protobuf.DescriptorProto.nested_type', index=3,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='enum_type', full_name='google.protobuf.DescriptorProto.enum_type', index=4,
+      number=4, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='extension_range', full_name='google.protobuf.DescriptorProto.extension_range', index=5,
+      number=5, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='oneof_decl', full_name='google.protobuf.DescriptorProto.oneof_decl', index=6,
+      number=8, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='options', full_name='google.protobuf.DescriptorProto.options', index=7,
+      number=7, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[_DESCRIPTORPROTO_EXTENSIONRANGE, ],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto2',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=605,
+  serialized_end=1089,
+)
+
+
+_FIELDDESCRIPTORPROTO = _descriptor.Descriptor(
+  name='FieldDescriptorProto',
+  full_name='google.protobuf.FieldDescriptorProto',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='google.protobuf.FieldDescriptorProto.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='number', full_name='google.protobuf.FieldDescriptorProto.number', index=1,
+      number=3, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='label', full_name='google.protobuf.FieldDescriptorProto.label', index=2,
+      number=4, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=1,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='type', full_name='google.protobuf.FieldDescriptorProto.type', index=3,
+      number=5, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=1,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='type_name', full_name='google.protobuf.FieldDescriptorProto.type_name', index=4,
+      number=6, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='extendee', full_name='google.protobuf.FieldDescriptorProto.extendee', index=5,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='default_value', full_name='google.protobuf.FieldDescriptorProto.default_value', index=6,
+      number=7, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='oneof_index', full_name='google.protobuf.FieldDescriptorProto.oneof_index', index=7,
+      number=9, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='options', full_name='google.protobuf.FieldDescriptorProto.options', index=8,
+      number=8, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _FIELDDESCRIPTORPROTO_TYPE,
+    _FIELDDESCRIPTORPROTO_LABEL,
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto2',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1092,
+  serialized_end=1773,
+)
+
+
+_ONEOFDESCRIPTORPROTO = _descriptor.Descriptor(
+  name='OneofDescriptorProto',
+  full_name='google.protobuf.OneofDescriptorProto',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='google.protobuf.OneofDescriptorProto.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto2',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1775,
+  serialized_end=1811,
+)
+
+
+_ENUMDESCRIPTORPROTO = _descriptor.Descriptor(
+  name='EnumDescriptorProto',
+  full_name='google.protobuf.EnumDescriptorProto',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='google.protobuf.EnumDescriptorProto.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='value', full_name='google.protobuf.EnumDescriptorProto.value', index=1,
+      number=2, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='options', full_name='google.protobuf.EnumDescriptorProto.options', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto2',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1814,
+  serialized_end=1954,
+)
+
+
+_ENUMVALUEDESCRIPTORPROTO = _descriptor.Descriptor(
+  name='EnumValueDescriptorProto',
+  full_name='google.protobuf.EnumValueDescriptorProto',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='google.protobuf.EnumValueDescriptorProto.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='number', full_name='google.protobuf.EnumValueDescriptorProto.number', index=1,
+      number=2, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='options', full_name='google.protobuf.EnumValueDescriptorProto.options', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto2',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1956,
+  serialized_end=2064,
+)
+
+
+_SERVICEDESCRIPTORPROTO = _descriptor.Descriptor(
+  name='ServiceDescriptorProto',
+  full_name='google.protobuf.ServiceDescriptorProto',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='google.protobuf.ServiceDescriptorProto.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='method', full_name='google.protobuf.ServiceDescriptorProto.method', index=1,
+      number=2, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='options', full_name='google.protobuf.ServiceDescriptorProto.options', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto2',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2067,
+  serialized_end=2211,
+)
+
+
+_METHODDESCRIPTORPROTO = _descriptor.Descriptor(
+  name='MethodDescriptorProto',
+  full_name='google.protobuf.MethodDescriptorProto',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='google.protobuf.MethodDescriptorProto.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='input_type', full_name='google.protobuf.MethodDescriptorProto.input_type', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='output_type', full_name='google.protobuf.MethodDescriptorProto.output_type', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='options', full_name='google.protobuf.MethodDescriptorProto.options', index=3,
+      number=4, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='client_streaming', full_name='google.protobuf.MethodDescriptorProto.client_streaming', index=4,
+      number=5, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='server_streaming', full_name='google.protobuf.MethodDescriptorProto.server_streaming', index=5,
+      number=6, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto2',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2214,
+  serialized_end=2407,
+)
+
+
+_FILEOPTIONS = _descriptor.Descriptor(
+  name='FileOptions',
+  full_name='google.protobuf.FileOptions',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='java_package', full_name='google.protobuf.FileOptions.java_package', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='java_outer_classname', full_name='google.protobuf.FileOptions.java_outer_classname', index=1,
+      number=8, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='java_multiple_files', full_name='google.protobuf.FileOptions.java_multiple_files', index=2,
+      number=10, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='java_generate_equals_and_hash', full_name='google.protobuf.FileOptions.java_generate_equals_and_hash', index=3,
+      number=20, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='java_string_check_utf8', full_name='google.protobuf.FileOptions.java_string_check_utf8', index=4,
+      number=27, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='optimize_for', full_name='google.protobuf.FileOptions.optimize_for', index=5,
+      number=9, type=14, cpp_type=8, label=1,
+      has_default_value=True, default_value=1,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='go_package', full_name='google.protobuf.FileOptions.go_package', index=6,
+      number=11, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='cc_generic_services', full_name='google.protobuf.FileOptions.cc_generic_services', index=7,
+      number=16, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='java_generic_services', full_name='google.protobuf.FileOptions.java_generic_services', index=8,
+      number=17, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='py_generic_services', full_name='google.protobuf.FileOptions.py_generic_services', index=9,
+      number=18, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='deprecated', full_name='google.protobuf.FileOptions.deprecated', index=10,
+      number=23, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='cc_enable_arenas', full_name='google.protobuf.FileOptions.cc_enable_arenas', index=11,
+      number=31, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='uninterpreted_option', full_name='google.protobuf.FileOptions.uninterpreted_option', index=12,
+      number=999, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _FILEOPTIONS_OPTIMIZEMODE,
+  ],
+  options=None,
+  is_extendable=True,
+  syntax='proto2',
+  extension_ranges=[(1000, 536870912), ],
+  oneofs=[
+  ],
+  serialized_start=2410,
+  serialized_end=2998,
+)
+
+
+_MESSAGEOPTIONS = _descriptor.Descriptor(
+  name='MessageOptions',
+  full_name='google.protobuf.MessageOptions',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='message_set_wire_format', full_name='google.protobuf.MessageOptions.message_set_wire_format', index=0,
+      number=1, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='no_standard_descriptor_accessor', full_name='google.protobuf.MessageOptions.no_standard_descriptor_accessor', index=1,
+      number=2, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='deprecated', full_name='google.protobuf.MessageOptions.deprecated', index=2,
+      number=3, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='map_entry', full_name='google.protobuf.MessageOptions.map_entry', index=3,
+      number=7, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='uninterpreted_option', full_name='google.protobuf.MessageOptions.uninterpreted_option', index=4,
+      number=999, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=True,
+  syntax='proto2',
+  extension_ranges=[(1000, 536870912), ],
+  oneofs=[
+  ],
+  serialized_start=3001,
+  serialized_end=3231,
+)
+
+
+_FIELDOPTIONS = _descriptor.Descriptor(
+  name='FieldOptions',
+  full_name='google.protobuf.FieldOptions',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='ctype', full_name='google.protobuf.FieldOptions.ctype', index=0,
+      number=1, type=14, cpp_type=8, label=1,
+      has_default_value=True, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='packed', full_name='google.protobuf.FieldOptions.packed', index=1,
+      number=2, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='lazy', full_name='google.protobuf.FieldOptions.lazy', index=2,
+      number=5, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='deprecated', full_name='google.protobuf.FieldOptions.deprecated', index=3,
+      number=3, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='weak', full_name='google.protobuf.FieldOptions.weak', index=4,
+      number=10, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='uninterpreted_option', full_name='google.protobuf.FieldOptions.uninterpreted_option', index=5,
+      number=999, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _FIELDOPTIONS_CTYPE,
+  ],
+  options=None,
+  is_extendable=True,
+  syntax='proto2',
+  extension_ranges=[(1000, 536870912), ],
+  oneofs=[
+  ],
+  serialized_start=3234,
+  serialized_end=3522,
+)
+
+
+_ENUMOPTIONS = _descriptor.Descriptor(
+  name='EnumOptions',
+  full_name='google.protobuf.EnumOptions',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='allow_alias', full_name='google.protobuf.EnumOptions.allow_alias', index=0,
+      number=2, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='deprecated', full_name='google.protobuf.EnumOptions.deprecated', index=1,
+      number=3, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='uninterpreted_option', full_name='google.protobuf.EnumOptions.uninterpreted_option', index=2,
+      number=999, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=True,
+  syntax='proto2',
+  extension_ranges=[(1000, 536870912), ],
+  oneofs=[
+  ],
+  serialized_start=3525,
+  serialized_end=3666,
+)
+
+
+_ENUMVALUEOPTIONS = _descriptor.Descriptor(
+  name='EnumValueOptions',
+  full_name='google.protobuf.EnumValueOptions',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='deprecated', full_name='google.protobuf.EnumValueOptions.deprecated', index=0,
+      number=1, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='uninterpreted_option', full_name='google.protobuf.EnumValueOptions.uninterpreted_option', index=1,
+      number=999, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=True,
+  syntax='proto2',
+  extension_ranges=[(1000, 536870912), ],
+  oneofs=[
+  ],
+  serialized_start=3668,
+  serialized_end=3793,
+)
+
+
+_SERVICEOPTIONS = _descriptor.Descriptor(
+  name='ServiceOptions',
+  full_name='google.protobuf.ServiceOptions',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='deprecated', full_name='google.protobuf.ServiceOptions.deprecated', index=0,
+      number=33, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='uninterpreted_option', full_name='google.protobuf.ServiceOptions.uninterpreted_option', index=1,
+      number=999, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=True,
+  syntax='proto2',
+  extension_ranges=[(1000, 536870912), ],
+  oneofs=[
+  ],
+  serialized_start=3795,
+  serialized_end=3918,
+)
+
+
+_METHODOPTIONS = _descriptor.Descriptor(
+  name='MethodOptions',
+  full_name='google.protobuf.MethodOptions',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='deprecated', full_name='google.protobuf.MethodOptions.deprecated', index=0,
+      number=33, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='uninterpreted_option', full_name='google.protobuf.MethodOptions.uninterpreted_option', index=1,
+      number=999, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=True,
+  syntax='proto2',
+  extension_ranges=[(1000, 536870912), ],
+  oneofs=[
+  ],
+  serialized_start=3920,
+  serialized_end=4042,
+)
+
+
+_UNINTERPRETEDOPTION_NAMEPART = _descriptor.Descriptor(
+  name='NamePart',
+  full_name='google.protobuf.UninterpretedOption.NamePart',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name_part', full_name='google.protobuf.UninterpretedOption.NamePart.name_part', index=0,
+      number=1, type=9, cpp_type=9, label=2,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='is_extension', full_name='google.protobuf.UninterpretedOption.NamePart.is_extension', index=1,
+      number=2, type=8, cpp_type=7, label=2,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto2',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=4280,
+  serialized_end=4331,
+)
+
+_UNINTERPRETEDOPTION = _descriptor.Descriptor(
+  name='UninterpretedOption',
+  full_name='google.protobuf.UninterpretedOption',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='google.protobuf.UninterpretedOption.name', index=0,
+      number=2, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='identifier_value', full_name='google.protobuf.UninterpretedOption.identifier_value', index=1,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='positive_int_value', full_name='google.protobuf.UninterpretedOption.positive_int_value', index=2,
+      number=4, type=4, cpp_type=4, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='negative_int_value', full_name='google.protobuf.UninterpretedOption.negative_int_value', index=3,
+      number=5, type=3, cpp_type=2, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='double_value', full_name='google.protobuf.UninterpretedOption.double_value', index=4,
+      number=6, type=1, cpp_type=5, label=1,
+      has_default_value=False, default_value=float(0),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='string_value', full_name='google.protobuf.UninterpretedOption.string_value', index=5,
+      number=7, type=12, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b(""),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='aggregate_value', full_name='google.protobuf.UninterpretedOption.aggregate_value', index=6,
+      number=8, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[_UNINTERPRETEDOPTION_NAMEPART, ],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto2',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=4045,
+  serialized_end=4331,
+)
+
+
+_SOURCECODEINFO_LOCATION = _descriptor.Descriptor(
+  name='Location',
+  full_name='google.protobuf.SourceCodeInfo.Location',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='path', full_name='google.protobuf.SourceCodeInfo.Location.path', index=0,
+      number=1, type=5, cpp_type=1, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='span', full_name='google.protobuf.SourceCodeInfo.Location.span', index=1,
+      number=2, type=5, cpp_type=1, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='leading_comments', full_name='google.protobuf.SourceCodeInfo.Location.leading_comments', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='trailing_comments', full_name='google.protobuf.SourceCodeInfo.Location.trailing_comments', index=3,
+      number=4, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto2',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=4412,
+  serialized_end=4511,
+)
+
+_SOURCECODEINFO = _descriptor.Descriptor(
+  name='SourceCodeInfo',
+  full_name='google.protobuf.SourceCodeInfo',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='location', full_name='google.protobuf.SourceCodeInfo.location', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[_SOURCECODEINFO_LOCATION, ],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto2',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=4334,
+  serialized_end=4511,
+)
+
+_FILEDESCRIPTORSET.fields_by_name['file'].message_type = _FILEDESCRIPTORPROTO
+_FILEDESCRIPTORPROTO.fields_by_name['message_type'].message_type = _DESCRIPTORPROTO
+_FILEDESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO
+_FILEDESCRIPTORPROTO.fields_by_name['service'].message_type = _SERVICEDESCRIPTORPROTO
+_FILEDESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO
+_FILEDESCRIPTORPROTO.fields_by_name['options'].message_type = _FILEOPTIONS
+_FILEDESCRIPTORPROTO.fields_by_name['source_code_info'].message_type = _SOURCECODEINFO
+_DESCRIPTORPROTO_EXTENSIONRANGE.containing_type = _DESCRIPTORPROTO
+_DESCRIPTORPROTO.fields_by_name['field'].message_type = _FIELDDESCRIPTORPROTO
+_DESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO
+_DESCRIPTORPROTO.fields_by_name['nested_type'].message_type = _DESCRIPTORPROTO
+_DESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO
+_DESCRIPTORPROTO.fields_by_name['extension_range'].message_type = _DESCRIPTORPROTO_EXTENSIONRANGE
+_DESCRIPTORPROTO.fields_by_name['oneof_decl'].message_type = _ONEOFDESCRIPTORPROTO
+_DESCRIPTORPROTO.fields_by_name['options'].message_type = _MESSAGEOPTIONS
+_FIELDDESCRIPTORPROTO.fields_by_name['label'].enum_type = _FIELDDESCRIPTORPROTO_LABEL
+_FIELDDESCRIPTORPROTO.fields_by_name['type'].enum_type = _FIELDDESCRIPTORPROTO_TYPE
+_FIELDDESCRIPTORPROTO.fields_by_name['options'].message_type = _FIELDOPTIONS
+_FIELDDESCRIPTORPROTO_TYPE.containing_type = _FIELDDESCRIPTORPROTO
+_FIELDDESCRIPTORPROTO_LABEL.containing_type = _FIELDDESCRIPTORPROTO
+_ENUMDESCRIPTORPROTO.fields_by_name['value'].message_type = _ENUMVALUEDESCRIPTORPROTO
+_ENUMDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMOPTIONS
+_ENUMVALUEDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMVALUEOPTIONS
+_SERVICEDESCRIPTORPROTO.fields_by_name['method'].message_type = _METHODDESCRIPTORPROTO
+_SERVICEDESCRIPTORPROTO.fields_by_name['options'].message_type = _SERVICEOPTIONS
+_METHODDESCRIPTORPROTO.fields_by_name['options'].message_type = _METHODOPTIONS
+_FILEOPTIONS.fields_by_name['optimize_for'].enum_type = _FILEOPTIONS_OPTIMIZEMODE
+_FILEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+_FILEOPTIONS_OPTIMIZEMODE.containing_type = _FILEOPTIONS
+_MESSAGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+_FIELDOPTIONS.fields_by_name['ctype'].enum_type = _FIELDOPTIONS_CTYPE
+_FIELDOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+_FIELDOPTIONS_CTYPE.containing_type = _FIELDOPTIONS
+_ENUMOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+_ENUMVALUEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+_SERVICEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+_METHODOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+_UNINTERPRETEDOPTION_NAMEPART.containing_type = _UNINTERPRETEDOPTION
+_UNINTERPRETEDOPTION.fields_by_name['name'].message_type = _UNINTERPRETEDOPTION_NAMEPART
+_SOURCECODEINFO_LOCATION.containing_type = _SOURCECODEINFO
+_SOURCECODEINFO.fields_by_name['location'].message_type = _SOURCECODEINFO_LOCATION
+DESCRIPTOR.message_types_by_name['FileDescriptorSet'] = _FILEDESCRIPTORSET
+DESCRIPTOR.message_types_by_name['FileDescriptorProto'] = _FILEDESCRIPTORPROTO
+DESCRIPTOR.message_types_by_name['DescriptorProto'] = _DESCRIPTORPROTO
+DESCRIPTOR.message_types_by_name['FieldDescriptorProto'] = _FIELDDESCRIPTORPROTO
+DESCRIPTOR.message_types_by_name['OneofDescriptorProto'] = _ONEOFDESCRIPTORPROTO
+DESCRIPTOR.message_types_by_name['EnumDescriptorProto'] = _ENUMDESCRIPTORPROTO
+DESCRIPTOR.message_types_by_name['EnumValueDescriptorProto'] = _ENUMVALUEDESCRIPTORPROTO
+DESCRIPTOR.message_types_by_name['ServiceDescriptorProto'] = _SERVICEDESCRIPTORPROTO
+DESCRIPTOR.message_types_by_name['MethodDescriptorProto'] = _METHODDESCRIPTORPROTO
+DESCRIPTOR.message_types_by_name['FileOptions'] = _FILEOPTIONS
+DESCRIPTOR.message_types_by_name['MessageOptions'] = _MESSAGEOPTIONS
+DESCRIPTOR.message_types_by_name['FieldOptions'] = _FIELDOPTIONS
+DESCRIPTOR.message_types_by_name['EnumOptions'] = _ENUMOPTIONS
+DESCRIPTOR.message_types_by_name['EnumValueOptions'] = _ENUMVALUEOPTIONS
+DESCRIPTOR.message_types_by_name['ServiceOptions'] = _SERVICEOPTIONS
+DESCRIPTOR.message_types_by_name['MethodOptions'] = _METHODOPTIONS
+DESCRIPTOR.message_types_by_name['UninterpretedOption'] = _UNINTERPRETEDOPTION
+DESCRIPTOR.message_types_by_name['SourceCodeInfo'] = _SOURCECODEINFO
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+FileDescriptorSet = _reflection.GeneratedProtocolMessageType('FileDescriptorSet', (_message.Message,), dict(
+  DESCRIPTOR = _FILEDESCRIPTORSET,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.FileDescriptorSet)
+  ))
+_sym_db.RegisterMessage(FileDescriptorSet)
+
+FileDescriptorProto = _reflection.GeneratedProtocolMessageType('FileDescriptorProto', (_message.Message,), dict(
+  DESCRIPTOR = _FILEDESCRIPTORPROTO,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.FileDescriptorProto)
+  ))
+_sym_db.RegisterMessage(FileDescriptorProto)
+
+DescriptorProto = _reflection.GeneratedProtocolMessageType('DescriptorProto', (_message.Message,), dict(
+
+  ExtensionRange = _reflection.GeneratedProtocolMessageType('ExtensionRange', (_message.Message,), dict(
+    DESCRIPTOR = _DESCRIPTORPROTO_EXTENSIONRANGE,
+    __module__ = 'google.protobuf.descriptor_pb2'
+    # @@protoc_insertion_point(class_scope:google.protobuf.DescriptorProto.ExtensionRange)
+    ))
+  ,
+  DESCRIPTOR = _DESCRIPTORPROTO,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.DescriptorProto)
+  ))
+_sym_db.RegisterMessage(DescriptorProto)
+_sym_db.RegisterMessage(DescriptorProto.ExtensionRange)
+
+FieldDescriptorProto = _reflection.GeneratedProtocolMessageType('FieldDescriptorProto', (_message.Message,), dict(
+  DESCRIPTOR = _FIELDDESCRIPTORPROTO,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.FieldDescriptorProto)
+  ))
+_sym_db.RegisterMessage(FieldDescriptorProto)
+
+OneofDescriptorProto = _reflection.GeneratedProtocolMessageType('OneofDescriptorProto', (_message.Message,), dict(
+  DESCRIPTOR = _ONEOFDESCRIPTORPROTO,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.OneofDescriptorProto)
+  ))
+_sym_db.RegisterMessage(OneofDescriptorProto)
+
+EnumDescriptorProto = _reflection.GeneratedProtocolMessageType('EnumDescriptorProto', (_message.Message,), dict(
+  DESCRIPTOR = _ENUMDESCRIPTORPROTO,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.EnumDescriptorProto)
+  ))
+_sym_db.RegisterMessage(EnumDescriptorProto)
+
+EnumValueDescriptorProto = _reflection.GeneratedProtocolMessageType('EnumValueDescriptorProto', (_message.Message,), dict(
+  DESCRIPTOR = _ENUMVALUEDESCRIPTORPROTO,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.EnumValueDescriptorProto)
+  ))
+_sym_db.RegisterMessage(EnumValueDescriptorProto)
+
+ServiceDescriptorProto = _reflection.GeneratedProtocolMessageType('ServiceDescriptorProto', (_message.Message,), dict(
+  DESCRIPTOR = _SERVICEDESCRIPTORPROTO,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.ServiceDescriptorProto)
+  ))
+_sym_db.RegisterMessage(ServiceDescriptorProto)
+
+MethodDescriptorProto = _reflection.GeneratedProtocolMessageType('MethodDescriptorProto', (_message.Message,), dict(
+  DESCRIPTOR = _METHODDESCRIPTORPROTO,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.MethodDescriptorProto)
+  ))
+_sym_db.RegisterMessage(MethodDescriptorProto)
+
+FileOptions = _reflection.GeneratedProtocolMessageType('FileOptions', (_message.Message,), dict(
+  DESCRIPTOR = _FILEOPTIONS,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.FileOptions)
+  ))
+_sym_db.RegisterMessage(FileOptions)
+
+MessageOptions = _reflection.GeneratedProtocolMessageType('MessageOptions', (_message.Message,), dict(
+  DESCRIPTOR = _MESSAGEOPTIONS,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.MessageOptions)
+  ))
+_sym_db.RegisterMessage(MessageOptions)
+
+FieldOptions = _reflection.GeneratedProtocolMessageType('FieldOptions', (_message.Message,), dict(
+  DESCRIPTOR = _FIELDOPTIONS,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.FieldOptions)
+  ))
+_sym_db.RegisterMessage(FieldOptions)
+
+EnumOptions = _reflection.GeneratedProtocolMessageType('EnumOptions', (_message.Message,), dict(
+  DESCRIPTOR = _ENUMOPTIONS,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.EnumOptions)
+  ))
+_sym_db.RegisterMessage(EnumOptions)
+
+EnumValueOptions = _reflection.GeneratedProtocolMessageType('EnumValueOptions', (_message.Message,), dict(
+  DESCRIPTOR = _ENUMVALUEOPTIONS,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.EnumValueOptions)
+  ))
+_sym_db.RegisterMessage(EnumValueOptions)
+
+ServiceOptions = _reflection.GeneratedProtocolMessageType('ServiceOptions', (_message.Message,), dict(
+  DESCRIPTOR = _SERVICEOPTIONS,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.ServiceOptions)
+  ))
+_sym_db.RegisterMessage(ServiceOptions)
+
+MethodOptions = _reflection.GeneratedProtocolMessageType('MethodOptions', (_message.Message,), dict(
+  DESCRIPTOR = _METHODOPTIONS,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.MethodOptions)
+  ))
+_sym_db.RegisterMessage(MethodOptions)
+
+UninterpretedOption = _reflection.GeneratedProtocolMessageType('UninterpretedOption', (_message.Message,), dict(
+
+  NamePart = _reflection.GeneratedProtocolMessageType('NamePart', (_message.Message,), dict(
+    DESCRIPTOR = _UNINTERPRETEDOPTION_NAMEPART,
+    __module__ = 'google.protobuf.descriptor_pb2'
+    # @@protoc_insertion_point(class_scope:google.protobuf.UninterpretedOption.NamePart)
+    ))
+  ,
+  DESCRIPTOR = _UNINTERPRETEDOPTION,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.UninterpretedOption)
+  ))
+_sym_db.RegisterMessage(UninterpretedOption)
+_sym_db.RegisterMessage(UninterpretedOption.NamePart)
+
+SourceCodeInfo = _reflection.GeneratedProtocolMessageType('SourceCodeInfo', (_message.Message,), dict(
+
+  Location = _reflection.GeneratedProtocolMessageType('Location', (_message.Message,), dict(
+    DESCRIPTOR = _SOURCECODEINFO_LOCATION,
+    __module__ = 'google.protobuf.descriptor_pb2'
+    # @@protoc_insertion_point(class_scope:google.protobuf.SourceCodeInfo.Location)
+    ))
+  ,
+  DESCRIPTOR = _SOURCECODEINFO,
+  __module__ = 'google.protobuf.descriptor_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.SourceCodeInfo)
+  ))
+_sym_db.RegisterMessage(SourceCodeInfo)
+_sym_db.RegisterMessage(SourceCodeInfo.Location)
+
+
+# @@protoc_insertion_point(module_scope)
diff --git a/acts/framework/acts/controllers/buds_lib/dev_utils/proto/gen/nanopb_pb2.py b/acts/framework/acts/controllers/buds_lib/dev_utils/proto/gen/nanopb_pb2.py
new file mode 100644
index 0000000..ddf3569
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/dev_utils/proto/gen/nanopb_pb2.py
@@ -0,0 +1,258 @@
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: nanopb.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='nanopb.proto',
+  package='',
+  syntax='proto2',
+  serialized_pb=_b('\n\x0cnanopb.proto\x1a google/protobuf/descriptor.proto\"\x80\x02\n\rNanoPBOptions\x12\x10\n\x08max_size\x18\x01 \x01(\x05\x12\x11\n\tmax_count\x18\x02 \x01(\x05\x12&\n\x08int_size\x18\x07 \x01(\x0e\x32\x08.IntSize:\nIS_DEFAULT\x12$\n\x04type\x18\x03 \x01(\x0e\x32\n.FieldType:\nFT_DEFAULT\x12\x18\n\nlong_names\x18\x04 \x01(\x08:\x04true\x12\x1c\n\rpacked_struct\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1b\n\x0cskip_message\x18\x06 \x01(\x08:\x05\x66\x61lse\x12\x18\n\tno_unions\x18\x08 \x01(\x08:\x05\x66\x61lse\x12\r\n\x05msgid\x18\t \x01(\r*Z\n\tFieldType\x12\x0e\n\nFT_DEFAULT\x10\x00\x12\x0f\n\x0b\x46T_CALLBACK\x10\x01\x12\x0e\n\nFT_POINTER\x10\x04\x12\r\n\tFT_STATIC\x10\x02\x12\r\n\tFT_IGNORE\x10\x03*D\n\x07IntSize\x12\x0e\n\nIS_DEFAULT\x10\x00\x12\x08\n\x04IS_8\x10\x08\x12\t\n\x05IS_16\x10\x10\x12\t\n\x05IS_32\x10 \x12\t\n\x05IS_64\x10@:E\n\x0enanopb_fileopt\x12\x1c.google.protobuf.FileOptions\x18\xf2\x07 \x01(\x0b\x32\x0e.NanoPBOptions:G\n\rnanopb_msgopt\x12\x1f.google.protobuf.MessageOptions\x18\xf2\x07 \x01(\x0b\x32\x0e.NanoPBOptions:E\n\x0enanopb_enumopt\x12\x1c.google.protobuf.EnumOptions\x18\xf2\x07 \x01(\x0b\x32\x0e.NanoPBOptions:>\n\x06nanopb\x12\x1d.google.protobuf.FieldOptions\x18\xf2\x07 \x01(\x0b\x32\x0e.NanoPBOptionsB\x1a\n\x18\x66i.kapsi.koti.jpa.nanopb')
+  ,
+  dependencies=[google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,])
+
+_FIELDTYPE = _descriptor.EnumDescriptor(
+  name='FieldType',
+  full_name='FieldType',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='FT_DEFAULT', index=0, number=0,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='FT_CALLBACK', index=1, number=1,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='FT_POINTER', index=2, number=4,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='FT_STATIC', index=3, number=2,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='FT_IGNORE', index=4, number=3,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=309,
+  serialized_end=399,
+)
+_sym_db.RegisterEnumDescriptor(_FIELDTYPE)
+
+FieldType = enum_type_wrapper.EnumTypeWrapper(_FIELDTYPE)
+_INTSIZE = _descriptor.EnumDescriptor(
+  name='IntSize',
+  full_name='IntSize',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='IS_DEFAULT', index=0, number=0,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='IS_8', index=1, number=8,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='IS_16', index=2, number=16,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='IS_32', index=3, number=32,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='IS_64', index=4, number=64,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=401,
+  serialized_end=469,
+)
+_sym_db.RegisterEnumDescriptor(_INTSIZE)
+
+IntSize = enum_type_wrapper.EnumTypeWrapper(_INTSIZE)
+FT_DEFAULT = 0
+FT_CALLBACK = 1
+FT_POINTER = 4
+FT_STATIC = 2
+FT_IGNORE = 3
+IS_DEFAULT = 0
+IS_8 = 8
+IS_16 = 16
+IS_32 = 32
+IS_64 = 64
+
+NANOPB_FILEOPT_FIELD_NUMBER = 1010
+nanopb_fileopt = _descriptor.FieldDescriptor(
+  name='nanopb_fileopt', full_name='nanopb_fileopt', index=0,
+  number=1010, type=11, cpp_type=10, label=1,
+  has_default_value=False, default_value=None,
+  message_type=None, enum_type=None, containing_type=None,
+  is_extension=True, extension_scope=None,
+  options=None)
+NANOPB_MSGOPT_FIELD_NUMBER = 1010
+nanopb_msgopt = _descriptor.FieldDescriptor(
+  name='nanopb_msgopt', full_name='nanopb_msgopt', index=1,
+  number=1010, type=11, cpp_type=10, label=1,
+  has_default_value=False, default_value=None,
+  message_type=None, enum_type=None, containing_type=None,
+  is_extension=True, extension_scope=None,
+  options=None)
+NANOPB_ENUMOPT_FIELD_NUMBER = 1010
+nanopb_enumopt = _descriptor.FieldDescriptor(
+  name='nanopb_enumopt', full_name='nanopb_enumopt', index=2,
+  number=1010, type=11, cpp_type=10, label=1,
+  has_default_value=False, default_value=None,
+  message_type=None, enum_type=None, containing_type=None,
+  is_extension=True, extension_scope=None,
+  options=None)
+NANOPB_FIELD_NUMBER = 1010
+nanopb = _descriptor.FieldDescriptor(
+  name='nanopb', full_name='nanopb', index=3,
+  number=1010, type=11, cpp_type=10, label=1,
+  has_default_value=False, default_value=None,
+  message_type=None, enum_type=None, containing_type=None,
+  is_extension=True, extension_scope=None,
+  options=None)
+
+
+_NANOPBOPTIONS = _descriptor.Descriptor(
+  name='NanoPBOptions',
+  full_name='NanoPBOptions',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='max_size', full_name='NanoPBOptions.max_size', index=0,
+      number=1, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='max_count', full_name='NanoPBOptions.max_count', index=1,
+      number=2, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='int_size', full_name='NanoPBOptions.int_size', index=2,
+      number=7, type=14, cpp_type=8, label=1,
+      has_default_value=True, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='type', full_name='NanoPBOptions.type', index=3,
+      number=3, type=14, cpp_type=8, label=1,
+      has_default_value=True, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='long_names', full_name='NanoPBOptions.long_names', index=4,
+      number=4, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=True,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='packed_struct', full_name='NanoPBOptions.packed_struct', index=5,
+      number=5, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='skip_message', full_name='NanoPBOptions.skip_message', index=6,
+      number=6, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='no_unions', full_name='NanoPBOptions.no_unions', index=7,
+      number=8, type=8, cpp_type=7, label=1,
+      has_default_value=True, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='msgid', full_name='NanoPBOptions.msgid', index=8,
+      number=9, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto2',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=51,
+  serialized_end=307,
+)
+
+_NANOPBOPTIONS.fields_by_name['int_size'].enum_type = _INTSIZE
+_NANOPBOPTIONS.fields_by_name['type'].enum_type = _FIELDTYPE
+DESCRIPTOR.message_types_by_name['NanoPBOptions'] = _NANOPBOPTIONS
+DESCRIPTOR.enum_types_by_name['FieldType'] = _FIELDTYPE
+DESCRIPTOR.enum_types_by_name['IntSize'] = _INTSIZE
+DESCRIPTOR.extensions_by_name['nanopb_fileopt'] = nanopb_fileopt
+DESCRIPTOR.extensions_by_name['nanopb_msgopt'] = nanopb_msgopt
+DESCRIPTOR.extensions_by_name['nanopb_enumopt'] = nanopb_enumopt
+DESCRIPTOR.extensions_by_name['nanopb'] = nanopb
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+NanoPBOptions = _reflection.GeneratedProtocolMessageType('NanoPBOptions', (_message.Message,), dict(
+  DESCRIPTOR = _NANOPBOPTIONS,
+  __module__ = 'nanopb_pb2'
+  # @@protoc_insertion_point(class_scope:NanoPBOptions)
+  ))
+_sym_db.RegisterMessage(NanoPBOptions)
+
+nanopb_fileopt.message_type = _NANOPBOPTIONS
+google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(nanopb_fileopt)
+nanopb_msgopt.message_type = _NANOPBOPTIONS
+google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(nanopb_msgopt)
+nanopb_enumopt.message_type = _NANOPBOPTIONS
+google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(nanopb_enumopt)
+nanopb.message_type = _NANOPBOPTIONS
+google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(nanopb)
+
+DESCRIPTOR.has_options = True
+DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030fi.kapsi.koti.jpa.nanopb'))
+# @@protoc_insertion_point(module_scope)
diff --git a/acts/framework/acts/controllers/buds_lib/dev_utils/proto/gen/plugin_pb2.py b/acts/framework/acts/controllers/buds_lib/dev_utils/proto/gen/plugin_pb2.py
new file mode 100644
index 0000000..d693863
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/dev_utils/proto/gen/plugin_pb2.py
@@ -0,0 +1,188 @@
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: plugin.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='plugin.proto',
+  package='google.protobuf.compiler',
+  syntax='proto2',
+  serialized_pb=_b('\n\x0cplugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"}\n\x14\x43odeGeneratorRequest\x12\x18\n\x10\x66ile_to_generate\x18\x01 \x03(\t\x12\x11\n\tparameter\x18\x02 \x01(\t\x12\x38\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xaa\x01\n\x15\x43odeGeneratorResponse\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x42\n\x04\x66ile\x18\x0f \x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.File\x1a>\n\x04\x46ile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0finsertion_point\x18\x02 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x0f \x01(\tB,\n\x1c\x63om.google.protobuf.compilerB\x0cPluginProtos')
+  ,
+  dependencies=[google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,])
+
+
+
+
+_CODEGENERATORREQUEST = _descriptor.Descriptor(
+  name='CodeGeneratorRequest',
+  full_name='google.protobuf.compiler.CodeGeneratorRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='file_to_generate', full_name='google.protobuf.compiler.CodeGeneratorRequest.file_to_generate', index=0,
+      number=1, type=9, cpp_type=9, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='parameter', full_name='google.protobuf.compiler.CodeGeneratorRequest.parameter', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='proto_file', full_name='google.protobuf.compiler.CodeGeneratorRequest.proto_file', index=2,
+      number=15, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto2',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=76,
+  serialized_end=201,
+)
+
+
+_CODEGENERATORRESPONSE_FILE = _descriptor.Descriptor(
+  name='File',
+  full_name='google.protobuf.compiler.CodeGeneratorResponse.File',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='google.protobuf.compiler.CodeGeneratorResponse.File.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='insertion_point', full_name='google.protobuf.compiler.CodeGeneratorResponse.File.insertion_point', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='content', full_name='google.protobuf.compiler.CodeGeneratorResponse.File.content', index=2,
+      number=15, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto2',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=312,
+  serialized_end=374,
+)
+
+_CODEGENERATORRESPONSE = _descriptor.Descriptor(
+  name='CodeGeneratorResponse',
+  full_name='google.protobuf.compiler.CodeGeneratorResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='error', full_name='google.protobuf.compiler.CodeGeneratorResponse.error', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='file', full_name='google.protobuf.compiler.CodeGeneratorResponse.file', index=1,
+      number=15, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[_CODEGENERATORRESPONSE_FILE, ],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto2',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=204,
+  serialized_end=374,
+)
+
+_CODEGENERATORREQUEST.fields_by_name['proto_file'].message_type = google_dot_protobuf_dot_descriptor__pb2._FILEDESCRIPTORPROTO
+_CODEGENERATORRESPONSE_FILE.containing_type = _CODEGENERATORRESPONSE
+_CODEGENERATORRESPONSE.fields_by_name['file'].message_type = _CODEGENERATORRESPONSE_FILE
+DESCRIPTOR.message_types_by_name['CodeGeneratorRequest'] = _CODEGENERATORREQUEST
+DESCRIPTOR.message_types_by_name['CodeGeneratorResponse'] = _CODEGENERATORRESPONSE
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+CodeGeneratorRequest = _reflection.GeneratedProtocolMessageType('CodeGeneratorRequest', (_message.Message,), dict(
+  DESCRIPTOR = _CODEGENERATORREQUEST,
+  __module__ = 'plugin_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorRequest)
+  ))
+_sym_db.RegisterMessage(CodeGeneratorRequest)
+
+CodeGeneratorResponse = _reflection.GeneratedProtocolMessageType('CodeGeneratorResponse', (_message.Message,), dict(
+
+  File = _reflection.GeneratedProtocolMessageType('File', (_message.Message,), dict(
+    DESCRIPTOR = _CODEGENERATORRESPONSE_FILE,
+    __module__ = 'plugin_pb2'
+    # @@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorResponse.File)
+    ))
+  ,
+  DESCRIPTOR = _CODEGENERATORRESPONSE,
+  __module__ = 'plugin_pb2'
+  # @@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorResponse)
+  ))
+_sym_db.RegisterMessage(CodeGeneratorResponse)
+_sym_db.RegisterMessage(CodeGeneratorResponse.File)
+
+
+DESCRIPTOR.has_options = True
+DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\034com.google.protobuf.compilerB\014PluginProtos'))
+# @@protoc_insertion_point(module_scope)
diff --git a/acts/framework/acts/controllers/buds_lib/dev_utils/proto/google/protobuf/descriptor.proto b/acts/framework/acts/controllers/buds_lib/dev_utils/proto/google/protobuf/descriptor.proto
new file mode 100644
index 0000000..e17c0cc
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/dev_utils/proto/google/protobuf/descriptor.proto
@@ -0,0 +1,714 @@
+// Protocol Buffers - Google's data interchange format
+// Copyright 2008 Google Inc.  All rights reserved.
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Author: kenton@google.com (Kenton Varda)
+//  Based on original Protocol Buffers design by
+//  Sanjay Ghemawat, Jeff Dean, and others.
+//
+// The messages in this file describe the definitions found in .proto files.
+// A valid .proto file can be translated directly to a FileDescriptorProto
+// without any other information (e.g. without reading its imports).
+
+
+syntax = "proto2";
+
+package google.protobuf;
+option java_package = "com.google.protobuf";
+option java_outer_classname = "DescriptorProtos";
+
+// descriptor.proto must be optimized for speed because reflection-based
+// algorithms don't work during bootstrapping.
+option optimize_for = SPEED;
+
+// The protocol compiler can output a FileDescriptorSet containing the .proto
+// files it parses.
+message FileDescriptorSet {
+  repeated FileDescriptorProto file = 1;
+}
+
+// Describes a complete .proto file.
+message FileDescriptorProto {
+  optional string name = 1;       // file name, relative to root of source tree
+  optional string package = 2;    // e.g. "foo", "foo.bar", etc.
+
+  // Names of files imported by this file.
+  repeated string dependency = 3;
+  // Indexes of the public imported files in the dependency list above.
+  repeated int32 public_dependency = 10;
+  // Indexes of the weak imported files in the dependency list.
+  // For Google-internal migration only. Do not use.
+  repeated int32 weak_dependency = 11;
+
+  // All top-level definitions in this file.
+  repeated DescriptorProto message_type = 4;
+  repeated EnumDescriptorProto enum_type = 5;
+  repeated ServiceDescriptorProto service = 6;
+  repeated FieldDescriptorProto extension = 7;
+
+  optional FileOptions options = 8;
+
+  // This field contains optional information about the original source code.
+  // You may safely remove this entire field without harming runtime
+  // functionality of the descriptors -- the information is needed only by
+  // development tools.
+  optional SourceCodeInfo source_code_info = 9;
+
+  // The syntax of the proto file.
+  // The supported values are "proto2" and "proto3".
+  optional string syntax = 12;
+}
+
+// Describes a message type.
+message DescriptorProto {
+  optional string name = 1;
+
+  repeated FieldDescriptorProto field = 2;
+  repeated FieldDescriptorProto extension = 6;
+
+  repeated DescriptorProto nested_type = 3;
+  repeated EnumDescriptorProto enum_type = 4;
+
+  message ExtensionRange {
+    optional int32 start = 1;
+    optional int32 end = 2;
+  }
+  repeated ExtensionRange extension_range = 5;
+
+  repeated OneofDescriptorProto oneof_decl = 8;
+
+  optional MessageOptions options = 7;
+}
+
+// Describes a field within a message.
+message FieldDescriptorProto {
+  enum Type {
+    // 0 is reserved for errors.
+    // Order is weird for historical reasons.
+    TYPE_DOUBLE         = 1;
+    TYPE_FLOAT          = 2;
+    // Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT64 if
+    // negative values are likely.
+    TYPE_INT64          = 3;
+    TYPE_UINT64         = 4;
+    // Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT32 if
+    // negative values are likely.
+    TYPE_INT32          = 5;
+    TYPE_FIXED64        = 6;
+    TYPE_FIXED32        = 7;
+    TYPE_BOOL           = 8;
+    TYPE_STRING         = 9;
+    TYPE_GROUP          = 10;  // Tag-delimited aggregate.
+    TYPE_MESSAGE        = 11;  // Length-delimited aggregate.
+
+    // New in version 2.
+    TYPE_BYTES          = 12;
+    TYPE_UINT32         = 13;
+    TYPE_ENUM           = 14;
+    TYPE_SFIXED32       = 15;
+    TYPE_SFIXED64       = 16;
+    TYPE_SINT32         = 17;  // Uses ZigZag encoding.
+    TYPE_SINT64         = 18;  // Uses ZigZag encoding.
+  };
+
+  enum Label {
+    // 0 is reserved for errors
+    LABEL_OPTIONAL      = 1;
+    LABEL_REQUIRED      = 2;
+    LABEL_REPEATED      = 3;
+    // TODO(sanjay): Should we add LABEL_MAP?
+  };
+
+  optional string name = 1;
+  optional int32 number = 3;
+  optional Label label = 4;
+
+  // If type_name is set, this need not be set.  If both this and type_name
+  // are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP.
+  optional Type type = 5;
+
+  // For message and enum types, this is the name of the type.  If the name
+  // starts with a '.', it is fully-qualified.  Otherwise, C++-like scoping
+  // rules are used to find the type (i.e. first the nested types within this
+  // message are searched, then within the parent, on up to the root
+  // namespace).
+  optional string type_name = 6;
+
+  // For extensions, this is the name of the type being extended.  It is
+  // resolved in the same manner as type_name.
+  optional string extendee = 2;
+
+  // For numeric types, contains the original text representation of the value.
+  // For booleans, "true" or "false".
+  // For strings, contains the default text contents (not escaped in any way).
+  // For bytes, contains the C escaped value.  All bytes >= 128 are escaped.
+  // TODO(kenton):  Base-64 encode?
+  optional string default_value = 7;
+
+  // If set, gives the index of a oneof in the containing type's oneof_decl
+  // list.  This field is a member of that oneof.  Extensions of a oneof should
+  // not set this since the oneof to which they belong will be inferred based
+  // on the extension range containing the extension's field number.
+  optional int32 oneof_index = 9;
+
+  optional FieldOptions options = 8;
+}
+
+// Describes a oneof.
+message OneofDescriptorProto {
+  optional string name = 1;
+}
+
+// Describes an enum type.
+message EnumDescriptorProto {
+  optional string name = 1;
+
+  repeated EnumValueDescriptorProto value = 2;
+
+  optional EnumOptions options = 3;
+}
+
+// Describes a value within an enum.
+message EnumValueDescriptorProto {
+  optional string name = 1;
+  optional int32 number = 2;
+
+  optional EnumValueOptions options = 3;
+}
+
+// Describes a service.
+message ServiceDescriptorProto {
+  optional string name = 1;
+  repeated MethodDescriptorProto method = 2;
+
+  optional ServiceOptions options = 3;
+}
+
+// Describes a method of a service.
+message MethodDescriptorProto {
+  optional string name = 1;
+
+  // Input and output type names.  These are resolved in the same way as
+  // FieldDescriptorProto.type_name, but must refer to a message type.
+  optional string input_type = 2;
+  optional string output_type = 3;
+
+  optional MethodOptions options = 4;
+
+  // Identifies if client streams multiple client messages
+  optional bool client_streaming = 5 [default=false];
+  // Identifies if server streams multiple server messages
+  optional bool server_streaming = 6 [default=false];
+}
+
+
+// ===================================================================
+// Options
+
+// Each of the definitions above may have "options" attached.  These are
+// just annotations which may cause code to be generated slightly differently
+// or may contain hints for code that manipulates protocol messages.
+//
+// Clients may define custom options as extensions of the *Options messages.
+// These extensions may not yet be known at parsing time, so the parser cannot
+// store the values in them.  Instead it stores them in a field in the *Options
+// message called uninterpreted_option. This field must have the same name
+// across all *Options messages. We then use this field to populate the
+// extensions when we build a descriptor, at which point all protos have been
+// parsed and so all extensions are known.
+//
+// Extension numbers for custom options may be chosen as follows:
+// * For options which will only be used within a single application or
+//   organization, or for experimental options, use field numbers 50000
+//   through 99999.  It is up to you to ensure that you do not use the
+//   same number for multiple options.
+// * For options which will be published and used publicly by multiple
+//   independent entities, e-mail protobuf-global-extension-registry@google.com
+//   to reserve extension numbers. Simply provide your project name (e.g.
+//   Objective-C plugin) and your project website (if available) -- there's no need
+//   to explain how you intend to use them. Usually you only need one extension
+//   number. You can declare multiple options with only one extension number by
+//   putting them in a sub-message. See the Custom Options section of the docs
+//   for examples:
+//   https://developers.google.com/protocol-buffers/docs/proto#options
+//   If this turns out to be popular, a web service will be set up
+//   to automatically assign option numbers.
+
+
+message FileOptions {
+
+  // Sets the Java package where classes generated from this .proto will be
+  // placed.  By default, the proto package is used, but this is often
+  // inappropriate because proto packages do not normally start with backwards
+  // domain names.
+  optional string java_package = 1;
+
+
+  // If set, all the classes from the .proto file are wrapped in a single
+  // outer class with the given name.  This applies to both Proto1
+  // (equivalent to the old "--one_java_file" option) and Proto2 (where
+  // a .proto always translates to a single class, but you may want to
+  // explicitly choose the class name).
+  optional string java_outer_classname = 8;
+
+  // If set true, then the Java code generator will generate a separate .java
+  // file for each top-level message, enum, and service defined in the .proto
+  // file.  Thus, these types will *not* be nested inside the outer class
+  // named by java_outer_classname.  However, the outer class will still be
+  // generated to contain the file's getDescriptor() method as well as any
+  // top-level extensions defined in the file.
+  optional bool java_multiple_files = 10 [default=false];
+
+  // If set true, then the Java code generator will generate equals() and
+  // hashCode() methods for all messages defined in the .proto file.
+  // - In the full runtime, this is purely a speed optimization, as the
+  // AbstractMessage base class includes reflection-based implementations of
+  // these methods.
+  // - In the lite runtime, setting this option changes the semantics of
+  // equals() and hashCode() to more closely match those of the full runtime;
+  // the generated methods compute their results based on field values rather
+  // than object identity. (Implementations should not assume that hashcodes
+  // will be consistent across runtimes or versions of the protocol compiler.)
+  optional bool java_generate_equals_and_hash = 20 [default=false];
+
+  // If set true, then the Java2 code generator will generate code that
+  // throws an exception whenever an attempt is made to assign a non-UTF-8
+  // byte sequence to a string field.
+  // Message reflection will do the same.
+  // However, an extension field still accepts non-UTF-8 byte sequences.
+  // This option has no effect when used with the lite runtime.
+  optional bool java_string_check_utf8 = 27 [default=false];
+
+
+  // Generated classes can be optimized for speed or code size.
+  enum OptimizeMode {
+    SPEED = 1;        // Generate complete code for parsing, serialization,
+                      // etc.
+    CODE_SIZE = 2;    // Use ReflectionOps to implement these methods.
+    LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime.
+  }
+  optional OptimizeMode optimize_for = 9 [default=SPEED];
+
+  // Sets the Go package where structs generated from this .proto will be
+  // placed. If omitted, the Go package will be derived from the following:
+  //   - The basename of the package import path, if provided.
+  //   - Otherwise, the package statement in the .proto file, if present.
+  //   - Otherwise, the basename of the .proto file, without extension.
+  optional string go_package = 11;
+
+
+
+  // Should generic services be generated in each language?  "Generic" services
+  // are not specific to any particular RPC system.  They are generated by the
+  // main code generators in each language (without additional plugins).
+  // Generic services were the only kind of service generation supported by
+  // early versions of google.protobuf.
+  //
+  // Generic services are now considered deprecated in favor of using plugins
+  // that generate code specific to your particular RPC system.  Therefore,
+  // these default to false.  Old code which depends on generic services should
+  // explicitly set them to true.
+  optional bool cc_generic_services = 16 [default=false];
+  optional bool java_generic_services = 17 [default=false];
+  optional bool py_generic_services = 18 [default=false];
+
+  // Is this file deprecated?
+  // Depending on the target platform, this can emit Deprecated annotations
+  // for everything in the file, or it will be completely ignored; in the very
+  // least, this is a formalization for deprecating files.
+  optional bool deprecated = 23 [default=false];
+
+
+  // Enables the use of arenas for the proto messages in this file. This applies
+  // only to generated classes for C++.
+  optional bool cc_enable_arenas = 31 [default=false];
+
+
+  // The parser stores options it doesn't recognize here. See above.
+  repeated UninterpretedOption uninterpreted_option = 999;
+
+  // Clients can define custom options in extensions of this message. See above.
+  extensions 1000 to max;
+}
+
+message MessageOptions {
+  // Set true to use the old proto1 MessageSet wire format for extensions.
+  // This is provided for backwards-compatibility with the MessageSet wire
+  // format.  You should not use this for any other reason:  It's less
+  // efficient, has fewer features, and is more complicated.
+  //
+  // The message must be defined exactly as follows:
+  //   message Foo {
+  //     option message_set_wire_format = true;
+  //     extensions 4 to max;
+  //   }
+  // Note that the message cannot have any defined fields; MessageSets only
+  // have extensions.
+  //
+  // All extensions of your type must be singular messages; e.g. they cannot
+  // be int32s, enums, or repeated messages.
+  //
+  // Because this is an option, the above two restrictions are not enforced by
+  // the protocol compiler.
+  optional bool message_set_wire_format = 1 [default=false];
+
+  // Disables the generation of the standard "descriptor()" accessor, which can
+  // conflict with a field of the same name.  This is meant to make migration
+  // from proto1 easier; new code should avoid fields named "descriptor".
+  optional bool no_standard_descriptor_accessor = 2 [default=false];
+
+  // Is this message deprecated?
+  // Depending on the target platform, this can emit Deprecated annotations
+  // for the message, or it will be completely ignored; in the very least,
+  // this is a formalization for deprecating messages.
+  optional bool deprecated = 3 [default=false];
+
+  // Whether the message is an automatically generated map entry type for the
+  // maps field.
+  //
+  // For maps fields:
+  //     map<KeyType, ValueType> map_field = 1;
+  // The parsed descriptor looks like:
+  //     message MapFieldEntry {
+  //         option map_entry = true;
+  //         optional KeyType key = 1;
+  //         optional ValueType value = 2;
+  //     }
+  //     repeated MapFieldEntry map_field = 1;
+  //
+  // Implementations may choose not to generate the map_entry=true message, but
+  // use a native map in the target language to hold the keys and values.
+  // The reflection APIs in such implementations still need to work as
+  // if the field is a repeated message field.
+  //
+  // NOTE: Do not set the option in .proto files. Always use the maps syntax
+  // instead. The option should only be implicitly set by the proto compiler
+  // parser.
+  optional bool map_entry = 7;
+
+  // The parser stores options it doesn't recognize here. See above.
+  repeated UninterpretedOption uninterpreted_option = 999;
+
+  // Clients can define custom options in extensions of this message. See above.
+  extensions 1000 to max;
+}
+
+message FieldOptions {
+  // The ctype option instructs the C++ code generator to use a different
+  // representation of the field than it normally would.  See the specific
+  // options below.  This option is not yet implemented in the open source
+  // release -- sorry, we'll try to include it in a future version!
+  optional CType ctype = 1 [default = STRING];
+  enum CType {
+    // Default mode.
+    STRING = 0;
+
+    CORD = 1;
+
+    STRING_PIECE = 2;
+  }
+  // The packed option can be enabled for repeated primitive fields to enable
+  // a more efficient representation on the wire. Rather than repeatedly
+  // writing the tag and type for each element, the entire array is encoded as
+  // a single length-delimited blob.
+  optional bool packed = 2;
+
+
+
+  // Should this field be parsed lazily?  Lazy applies only to message-type
+  // fields.  It means that when the outer message is initially parsed, the
+  // inner message's contents will not be parsed but instead stored in encoded
+  // form.  The inner message will actually be parsed when it is first accessed.
+  //
+  // This is only a hint.  Implementations are free to choose whether to use
+  // eager or lazy parsing regardless of the value of this option.  However,
+  // setting this option true suggests that the protocol author believes that
+  // using lazy parsing on this field is worth the additional bookkeeping
+  // overhead typically needed to implement it.
+  //
+  // This option does not affect the public interface of any generated code;
+  // all method signatures remain the same.  Furthermore, thread-safety of the
+  // interface is not affected by this option; const methods remain safe to
+  // call from multiple threads concurrently, while non-const methods continue
+  // to require exclusive access.
+  //
+  //
+  // Note that implementations may choose not to check required fields within
+  // a lazy sub-message.  That is, calling IsInitialized() on the outer message
+  // may return true even if the inner message has missing required fields.
+  // This is necessary because otherwise the inner message would have to be
+  // parsed in order to perform the check, defeating the purpose of lazy
+  // parsing.  An implementation which chooses not to check required fields
+  // must be consistent about it.  That is, for any particular sub-message, the
+  // implementation must either *always* check its required fields, or *never*
+  // check its required fields, regardless of whether or not the message has
+  // been parsed.
+  optional bool lazy = 5 [default=false];
+
+  // Is this field deprecated?
+  // Depending on the target platform, this can emit Deprecated annotations
+  // for accessors, or it will be completely ignored; in the very least, this
+  // is a formalization for deprecating fields.
+  optional bool deprecated = 3 [default=false];
+
+  // For Google-internal migration only. Do not use.
+  optional bool weak = 10 [default=false];
+
+
+
+  // The parser stores options it doesn't recognize here. See above.
+  repeated UninterpretedOption uninterpreted_option = 999;
+
+  // Clients can define custom options in extensions of this message. See above.
+  extensions 1000 to max;
+}
+
+message EnumOptions {
+
+  // Set this option to true to allow mapping different tag names to the same
+  // value.
+  optional bool allow_alias = 2;
+
+  // Is this enum deprecated?
+  // Depending on the target platform, this can emit Deprecated annotations
+  // for the enum, or it will be completely ignored; in the very least, this
+  // is a formalization for deprecating enums.
+  optional bool deprecated = 3 [default=false];
+
+  // The parser stores options it doesn't recognize here. See above.
+  repeated UninterpretedOption uninterpreted_option = 999;
+
+  // Clients can define custom options in extensions of this message. See above.
+  extensions 1000 to max;
+}
+
+message EnumValueOptions {
+  // Is this enum value deprecated?
+  // Depending on the target platform, this can emit Deprecated annotations
+  // for the enum value, or it will be completely ignored; in the very least,
+  // this is a formalization for deprecating enum values.
+  optional bool deprecated = 1 [default=false];
+
+  // The parser stores options it doesn't recognize here. See above.
+  repeated UninterpretedOption uninterpreted_option = 999;
+
+  // Clients can define custom options in extensions of this message. See above.
+  extensions 1000 to max;
+}
+
+message ServiceOptions {
+
+  // Note:  Field numbers 1 through 32 are reserved for Google's internal RPC
+  //   framework.  We apologize for hoarding these numbers to ourselves, but
+  //   we were already using them long before we decided to release Protocol
+  //   Buffers.
+
+  // Is this service deprecated?
+  // Depending on the target platform, this can emit Deprecated annotations
+  // for the service, or it will be completely ignored; in the very least,
+  // this is a formalization for deprecating services.
+  optional bool deprecated = 33 [default=false];
+
+  // The parser stores options it doesn't recognize here. See above.
+  repeated UninterpretedOption uninterpreted_option = 999;
+
+  // Clients can define custom options in extensions of this message. See above.
+  extensions 1000 to max;
+}
+
+message MethodOptions {
+
+  // Note:  Field numbers 1 through 32 are reserved for Google's internal RPC
+  //   framework.  We apologize for hoarding these numbers to ourselves, but
+  //   we were already using them long before we decided to release Protocol
+  //   Buffers.
+
+  // Is this method deprecated?
+  // Depending on the target platform, this can emit Deprecated annotations
+  // for the method, or it will be completely ignored; in the very least,
+  // this is a formalization for deprecating methods.
+  optional bool deprecated = 33 [default=false];
+
+  // The parser stores options it doesn't recognize here. See above.
+  repeated UninterpretedOption uninterpreted_option = 999;
+
+  // Clients can define custom options in extensions of this message. See above.
+  extensions 1000 to max;
+}
+
+
+// A message representing a option the parser does not recognize. This only
+// appears in options protos created by the compiler::Parser class.
+// DescriptorPool resolves these when building Descriptor objects. Therefore,
+// options protos in descriptor objects (e.g. returned by Descriptor::options(),
+// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions
+// in them.
+message UninterpretedOption {
+  // The name of the uninterpreted option.  Each string represents a segment in
+  // a dot-separated name.  is_extension is true iff a segment represents an
+  // extension (denoted with parentheses in options specs in .proto files).
+  // E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents
+  // "foo.(bar.baz).qux".
+  message NamePart {
+    required string name_part = 1;
+    required bool is_extension = 2;
+  }
+  repeated NamePart name = 2;
+
+  // The value of the uninterpreted option, in whatever type the tokenizer
+  // identified it as during parsing. Exactly one of these should be set.
+  optional string identifier_value = 3;
+  optional uint64 positive_int_value = 4;
+  optional int64 negative_int_value = 5;
+  optional double double_value = 6;
+  optional bytes string_value = 7;
+  optional string aggregate_value = 8;
+}
+
+// ===================================================================
+// Optional source code info
+
+// Encapsulates information about the original source file from which a
+// FileDescriptorProto was generated.
+message SourceCodeInfo {
+  // A Location identifies a piece of source code in a .proto file which
+  // corresponds to a particular definition.  This information is intended
+  // to be useful to IDEs, code indexers, documentation generators, and similar
+  // tools.
+  //
+  // For example, say we have a file like:
+  //   message Foo {
+  //     optional string foo = 1;
+  //   }
+  // Let's look at just the field definition:
+  //   optional string foo = 1;
+  //   ^       ^^     ^^  ^  ^^^
+  //   a       bc     de  f  ghi
+  // We have the following locations:
+  //   span   path               represents
+  //   [a,i)  [ 4, 0, 2, 0 ]     The whole field definition.
+  //   [a,b)  [ 4, 0, 2, 0, 4 ]  The label (optional).
+  //   [c,d)  [ 4, 0, 2, 0, 5 ]  The type (string).
+  //   [e,f)  [ 4, 0, 2, 0, 1 ]  The name (foo).
+  //   [g,h)  [ 4, 0, 2, 0, 3 ]  The number (1).
+  //
+  // Notes:
+  // - A location may refer to a repeated field itself (i.e. not to any
+  //   particular index within it).  This is used whenever a set of elements are
+  //   logically enclosed in a single code segment.  For example, an entire
+  //   extend block (possibly containing multiple extension definitions) will
+  //   have an outer location whose path refers to the "extensions" repeated
+  //   field without an index.
+  // - Multiple locations may have the same path.  This happens when a single
+  //   logical declaration is spread out across multiple places.  The most
+  //   obvious example is the "extend" block again -- there may be multiple
+  //   extend blocks in the same scope, each of which will have the same path.
+  // - A location's span is not always a subset of its parent's span.  For
+  //   example, the "extendee" of an extension declaration appears at the
+  //   beginning of the "extend" block and is shared by all extensions within
+  //   the block.
+  // - Just because a location's span is a subset of some other location's span
+  //   does not mean that it is a descendant.  For example, a "group" defines
+  //   both a type and a field in a single declaration.  Thus, the locations
+  //   corresponding to the type and field and their components will overlap.
+  // - Code which tries to interpret locations should probably be designed to
+  //   ignore those that it doesn't understand, as more types of locations could
+  //   be recorded in the future.
+  repeated Location location = 1;
+  message Location {
+    // Identifies which part of the FileDescriptorProto was defined at this
+    // location.
+    //
+    // Each element is a field number or an index.  They form a path from
+    // the root FileDescriptorProto to the place where the definition appears.
+    // example, this path:
+    //   [ 4, 3, 2, 7, 1 ]
+    // refers to:
+    //   file.message_type(3)  // 4, 3
+    //       .field(7)         // 2, 7
+    //       .name()           // 1
+    // This is because FileDescriptorProto.message_type has field number 4:
+    //   repeated DescriptorProto message_type = 4;
+    // and DescriptorProto.field has field number 2:
+    //   repeated FieldDescriptorProto field = 2;
+    // and FieldDescriptorProto.name has field number 1:
+    //   optional string name = 1;
+    //
+    // Thus, the above path gives the location of a field name.  If we removed
+    // the last element:
+    //   [ 4, 3, 2, 7 ]
+    // this path refers to the whole field declaration (from the beginning
+    // of the label to the terminating semicolon).
+    repeated int32 path = 1 [packed=true];
+
+    // Always has exactly three or four elements: start line, start column,
+    // end line (optional, otherwise assumed same as start line), end column.
+    // These are packed into a single field for efficiency.  Note that line
+    // and column numbers are zero-based -- typically you will want to add
+    // 1 to each before displaying to a user.
+    repeated int32 span = 2 [packed=true];
+
+    // If this SourceCodeInfo represents a complete declaration, these are any
+    // comments appearing before and after the declaration which appear to be
+    // attached to the declaration.
+    //
+    // A series of line comments appearing on consecutive lines, with no other
+    // tokens appearing on those lines, will be treated as a single comment.
+    //
+    // Only the comment content is provided; comment markers (e.g. //) are
+    // stripped out.  For block comments, leading whitespace and an asterisk
+    // will be stripped from the beginning of each line other than the first.
+    // Newlines are included in the output.
+    //
+    // Examples:
+    //
+    //   optional int32 foo = 1;  // Comment attached to foo.
+    //   // Comment attached to bar.
+    //   optional int32 bar = 2;
+    //
+    //   optional string baz = 3;
+    //   // Comment attached to baz.
+    //   // Another line attached to baz.
+    //
+    //   // Comment attached to qux.
+    //   //
+    //   // Another line attached to qux.
+    //   optional double qux = 4;
+    //
+    //   optional string corge = 5;
+    //   /* Block comment attached
+    //    * to corge.  Leading asterisks
+    //    * will be removed. */
+    //   /* Block comment attached to
+    //    * grault. */
+    //   optional int32 grault = 6;
+    optional string leading_comments = 3;
+    optional string trailing_comments = 4;
+  }
+}
diff --git a/acts/framework/acts/controllers/buds_lib/dev_utils/proto/nanopb.proto b/acts/framework/acts/controllers/buds_lib/dev_utils/proto/nanopb.proto
new file mode 100644
index 0000000..5053dfd
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/dev_utils/proto/nanopb.proto
@@ -0,0 +1,89 @@
+// Custom options for defining:
+// - Maximum size of string/bytes
+// - Maximum number of elements in array
+//
+// These are used by nanopb to generate statically allocable structures
+// for memory-limited environments.
+
+syntax = "proto2";
+import "google/protobuf/descriptor.proto";
+
+option java_package = "fi.kapsi.koti.jpa.nanopb";
+
+enum FieldType {
+    FT_DEFAULT = 0; // Automatically decide field type, generate static field if possible.
+    FT_CALLBACK = 1; // Always generate a callback field.
+    FT_POINTER = 4; // Always generate a dynamically allocated field.
+    FT_STATIC = 2; // Generate a static field or raise an exception if not possible.
+    FT_IGNORE = 3; // Ignore the field completely.
+}
+
+enum IntSize {
+    IS_DEFAULT = 0; // Default, 32/64bit based on type in .proto
+    IS_8 = 8;
+    IS_16 = 16;
+    IS_32 = 32;
+    IS_64 = 64;
+}
+
+// This is the inner options message, which basically defines options for
+// a field. When it is used in message or file scope, it applies to all
+// fields.
+message NanoPBOptions {
+  // Allocated size for 'bytes' and 'string' fields.
+  optional int32 max_size = 1;
+
+  // Allocated number of entries in arrays ('repeated' fields)
+  optional int32 max_count = 2;
+
+  // Size of integer fields. Can save some memory if you don't need
+  // full 32 bits for the value.
+  optional IntSize int_size = 7 [default = IS_DEFAULT];
+
+  // Force type of field (callback or static allocation)
+  optional FieldType type = 3 [default = FT_DEFAULT];
+
+  // Use long names for enums, i.e. EnumName_EnumValue.
+  optional bool long_names = 4 [default = true];
+
+  // Add 'packed' attribute to generated structs.
+  // Note: this cannot be used on CPUs that break on unaligned
+  // accesses to variables.
+  optional bool packed_struct = 5 [default = false];
+
+  // Skip this message
+  optional bool skip_message = 6 [default = false];
+
+  // Generate oneof fields as normal optional fields instead of union.
+  optional bool no_unions = 8 [default = false];
+
+  // integer type tag for a message
+  optional uint32 msgid = 9;
+}
+
+// Extensions to protoc 'Descriptor' type in order to define options
+// inside a .proto file.
+//
+// Protocol Buffers extension number registry
+// --------------------------------
+// Project:  Nanopb
+// Contact:  Petteri Aimonen <jpa@kapsi.fi>
+// Web site: http://kapsi.fi/~jpa/nanopb
+// Extensions: 1010 (all types)
+// --------------------------------
+
+extend google.protobuf.FileOptions {
+    optional NanoPBOptions nanopb_fileopt = 1010;
+}
+
+extend google.protobuf.MessageOptions {
+    optional NanoPBOptions nanopb_msgopt = 1010;
+}
+
+extend google.protobuf.EnumOptions {
+    optional NanoPBOptions nanopb_enumopt = 1010;
+}
+
+extend google.protobuf.FieldOptions {
+    optional NanoPBOptions nanopb = 1010;
+}
diff --git a/acts/framework/acts/controllers/buds_lib/dev_utils/proto/plugin.proto b/acts/framework/acts/controllers/buds_lib/dev_utils/proto/plugin.proto
new file mode 100644
index 0000000..e627289
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/dev_utils/proto/plugin.proto
@@ -0,0 +1,148 @@
+// Protocol Buffers - Google's data interchange format
+// Copyright 2008 Google Inc.  All rights reserved.
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Author: kenton@google.com (Kenton Varda)
+//
+// WARNING:  The plugin interface is currently EXPERIMENTAL and is subject to
+//   change.
+//
+// protoc (aka the Protocol Compiler) can be extended via plugins.  A plugin is
+// just a program that reads a CodeGeneratorRequest from stdin and writes a
+// CodeGeneratorResponse to stdout.
+//
+// Plugins written using C++ can use google/protobuf/compiler/plugin.h instead
+// of dealing with the raw protocol defined here.
+//
+// A plugin executable needs only to be placed somewhere in the path.  The
+// plugin should be named "protoc-gen-$NAME", and will then be used when the
+// flag "--${NAME}_out" is passed to protoc.
+
+syntax = "proto2";
+package google.protobuf.compiler;
+option java_package = "com.google.protobuf.compiler";
+option java_outer_classname = "PluginProtos";
+
+import "google/protobuf/descriptor.proto";
+
+// An encoded CodeGeneratorRequest is written to the plugin's stdin.
+message CodeGeneratorRequest {
+  // The .proto files that were explicitly listed on the command-line.  The
+  // code generator should generate code only for these files.  Each file's
+  // descriptor will be included in proto_file, below.
+  repeated string file_to_generate = 1;
+
+  // The generator parameter passed on the command-line.
+  optional string parameter = 2;
+
+  // FileDescriptorProtos for all files in files_to_generate and everything
+  // they import.  The files will appear in topological order, so each file
+  // appears before any file that imports it.
+  //
+  // protoc guarantees that all proto_files will be written after
+  // the fields above, even though this is not technically guaranteed by the
+  // protobuf wire format.  This theoretically could allow a plugin to stream
+  // in the FileDescriptorProtos and handle them one by one rather than read
+  // the entire set into memory at once.  However, as of this writing, this
+  // is not similarly optimized on protoc's end -- it will store all fields in
+  // memory at once before sending them to the plugin.
+  repeated FileDescriptorProto proto_file = 15;
+}
+
+// The plugin writes an encoded CodeGeneratorResponse to stdout.
+message CodeGeneratorResponse {
+  // Error message.  If non-empty, code generation failed.  The plugin process
+  // should exit with status code zero even if it reports an error in this way.
+  //
+  // This should be used to indicate errors in .proto files which prevent the
+  // code generator from generating correct code.  Errors which indicate a
+  // problem in protoc itself -- such as the input CodeGeneratorRequest being
+  // unparseable -- should be reported by writing a message to stderr and
+  // exiting with a non-zero status code.
+  optional string error = 1;
+
+  // Represents a single generated file.
+  message File {
+    // The file name, relative to the output directory.  The name must not
+    // contain "." or ".." components and must be relative, not be absolute (so,
+    // the file cannot lie outside the output directory).  "/" must be used as
+    // the path separator, not "\".
+    //
+    // If the name is omitted, the content will be appended to the previous
+    // file.  This allows the generator to break large files into small chunks,
+    // and allows the generated text to be streamed back to protoc so that large
+    // files need not reside completely in memory at one time.  Note that as of
+    // this writing protoc does not optimize for this -- it will read the entire
+    // CodeGeneratorResponse before writing files to disk.
+    optional string name = 1;
+
+    // If non-empty, indicates that the named file should already exist, and the
+    // content here is to be inserted into that file at a defined insertion
+    // point.  This feature allows a code generator to extend the output
+    // produced by another code generator.  The original generator may provide
+    // insertion points by placing special annotations in the file that look
+    // like:
+    //   @@protoc_insertion_point(NAME)
+    // The annotation can have arbitrary text before and after it on the line,
+    // which allows it to be placed in a comment.  NAME should be replaced with
+    // an identifier naming the point -- this is what other generators will use
+    // as the insertion_point.  Code inserted at this point will be placed
+    // immediately above the line containing the insertion point (thus multiple
+    // insertions to the same point will come out in the order they were added).
+    // The double-@ is intended to make it unlikely that the generated code
+    // could contain things that look like insertion points by accident.
+    //
+    // For example, the C++ code generator places the following line in the
+    // .pb.h files that it generates:
+    //   // @@protoc_insertion_point(namespace_scope)
+    // This line appears within the scope of the file's package namespace, but
+    // outside of any particular class.  Another plugin can then specify the
+    // insertion_point "namespace_scope" to generate additional classes or
+    // other declarations that should be placed in this scope.
+    //
+    // Note that if the line containing the insertion point begins with
+    // whitespace, the same whitespace will be added to every line of the
+    // inserted text.  This is useful for languages like Python, where
+    // indentation matters.  In these languages, the insertion point comment
+    // should be indented the same amount as any inserted code will need to be
+    // in order to work correctly in that context.
+    //
+    // The code generator that generates the initial file and the one which
+    // inserts into it must both run as part of a single invocation of protoc.
+    // Code generators are executed in the order in which they appear on the
+    // command line.
+    //
+    // If |insertion_point| is present, |name| must also be present.
+    optional string insertion_point = 2;
+
+    // The file contents.
+    optional string content = 15;
+  }
+  repeated File file = 15;
+}
diff --git a/acts/framework/acts/controllers/buds_lib/latency.py b/acts/framework/acts/controllers/buds_lib/latency.py
new file mode 100644
index 0000000..7c5357b
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/latency.py
@@ -0,0 +1,281 @@
+#!/usr/bin/env python3
+#
+#   Copyright 2018 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+"""Processes profiling data to output latency numbers."""
+#
+# Type "python latency.py -h" for help
+#
+# Currently the log data is assumed to be in the following format:
+# PROF:<event-id> <timestamp>
+# The <event-id> and <timestamp> can be in the form of any valid
+# (positive)integer literal in Python
+# Examples:
+#   PROF:0x0006 0x00000155e0d043f1
+#   PROF:6 1468355593201
+
+import argparse
+from collections import defaultdict
+import csv
+import logging
+import math
+import os
+import string
+import xml.etree.ElementTree as ET
+
+valid_fname_chars = '-_.()%s%s' % (string.ascii_letters, string.digits)
+PERCENTILE_STEP = 1
+PROFILER_DATA_PREFIX = 'PROF:'
+
+
class EventPair(object):
    """Bookkeeping record for one (start event, end event) pair.

    Attributes:
        pair_id: int, unique id combining the start and end event ids.
        latency: int, scratch slot used by the latency computation.
        name: str, human-readable name of the pair.
    """

    def __init__(self, pair_id, latency, name):
        self.name = name
        self.latency = latency
        self.pair_id = pair_id
+
+
class LatencyEntry(object):
    """One measured latency sample.

    Attributes:
        start_timestamp: timestamp of the start event.
        latency: elapsed time between the start and end events.
    """

    def __init__(self, start_timestamp, latency):
        self.latency = latency
        self.start_timestamp = start_timestamp
+
+
def parse_xml(xml_file):
    """
    Parse the configuration xml file.

    Args:
      xml_file: path (or file object) of the XML config containing <event>
                and <event-pair> elements.

    Returns:
      event_pairs_by_pair_id: dict mapping pair id to EventPair object
      event_pairs_by_start_id: dict mapping starting event to list of event
                               pairs with that starting event.
      event_pairs_by_end_id: dict mapping ending event to list of event pairs
                             with that ending event.
    """
    root = ET.parse(xml_file).getroot()
    event_pairs_by_pair_id = {}
    event_pairs_by_start_id = defaultdict(list)
    event_pairs_by_end_id = defaultdict(list)

    for event_pair in root.findall('event-pair'):
        start_evt = root.find(
            "./event[@id='{0:}']".format(event_pair.attrib['start-event']))
        end_evt = root.find(
            "./event[@id='{0:}']".format(event_pair.attrib['end-event']))
        # Guard against an <event-pair> referencing an undefined event id;
        # the original code dereferenced None and raised AttributeError.
        if start_evt is None or end_evt is None:
            logging.error('Event pair references unknown event: %s',
                          event_pair.attrib)
            continue
        # int(..., 0) accepts any valid integer literal (e.g. 0x6 or 6).
        start = int(start_evt.attrib['id'], 0)
        end = int(end_evt.attrib['id'], 0)
        pair_id = start << 32 | end
        if pair_id in event_pairs_by_pair_id:
            logging.error('Latency event repeated: start id = %d, end id = %d',
                          start,
                          end)
            continue
        # Create the output file name base by concatenating:
        # "start event name" + "_to_" + "end event name", replacing any
        # character outside the filename whitelist with '_'.
        evt_pair_name = '{}_to_{}'.format(start_evt.attrib['name'],
                                          end_evt.attrib['name'])
        evt_pair_name = ''.join(
            c if c in valid_fname_chars else '_' for c in evt_pair_name)
        pair = EventPair(pair_id, 0, evt_pair_name)
        event_pairs_by_pair_id[pair_id] = pair
        event_pairs_by_start_id[start].append(pair)
        event_pairs_by_end_id[end].append(pair)
    return (event_pairs_by_pair_id, event_pairs_by_start_id,
            event_pairs_by_end_id)
+
+
def percentile_to_index(num_entries, percentile):
    """
    Returns the index in an array corresponding to a percentile.

    Arguments:
      num_entries: the number of entries in the array.
      percentile: which percentile to calculate the index for.
    Returns:
      The index in the array corresponding to the percentile.
    """
    # Floor of num_entries * percentile / 100 turns the percentile into a
    # count; shift down by one to get a zero-based index, clamped at zero.
    index = int(math.floor(float(num_entries) * percentile / 100))
    return index - 1 if index > 0 else index
+
+
def compute_latencies(input_file, event_pairs_by_start_id,
                      event_pairs_by_end_id):
    """Parse the input data file and compute latencies.

    Lines matching 'PROF:<event-id> <timestamp>' are parsed; anything else
    is skipped. For each end event seen after its matching start event, a
    LatencyEntry is recorded.

    Args:
        input_file: open text file object containing the log lines.
        event_pairs_by_start_id: dict mapping start event id to EventPairs.
        event_pairs_by_end_id: dict mapping end event id to EventPairs.

    Returns:
        dict mapping pair id to a list of LatencyEntry objects.
    """
    line_num = 0
    lat_tables_by_pair_id = defaultdict(list)
    while True:
        line_num += 1
        line = input_file.readline()
        if not line:
            break
        # Everything after the 'PROF:' marker; empty string if absent.
        data = line.partition(PROFILER_DATA_PREFIX)[2]
        if not data:
            continue
        try:
            # int(x, 0) accepts any integer literal form (hex or decimal).
            event_id, timestamp = [int(x, 0) for x in data.split()]
        except ValueError:
            logging.error('Badly formed event entry at line #%s: %s', line_num,
                          line)
            continue
        # We use event_pair.latency to temporarily store the timestamp
        # of the start event
        for event_pair in event_pairs_by_start_id[event_id]:
            event_pair.latency = timestamp
        for event_pair in event_pairs_by_end_id[event_id]:
            # compute the latency only if we have seen the corresponding
            # start event already
            # NOTE(review): a start timestamp of exactly 0 is
            # indistinguishable from "not seen yet" here — presumably
            # timestamps are always positive; confirm.
            if event_pair.latency:
                lat_tables_by_pair_id[event_pair.pair_id].append(
                    LatencyEntry(event_pair.latency,
                                 timestamp - event_pair.latency))
                event_pair.latency = 0
    return lat_tables_by_pair_id
+
+
def write_data(fname_base, event_pairs_by_pair_id, lat_tables_by_pair_id):
    """Write each pair's latency table to '<fname_base>_<pair>_data.csv'.

    Each CSV row is: start_timestamp, latency.

    Args:
        fname_base: str, prefix (may include a directory) for output files.
        event_pairs_by_pair_id: dict mapping pair id to EventPair.
        lat_tables_by_pair_id: dict mapping pair id to list of LatencyEntry.
    """
    for event_id, lat_table in lat_tables_by_pair_id.items():
        event_pair = event_pairs_by_pair_id[event_id]
        # Open in text mode with newline='' as csv requires in Python 3;
        # the original 'wb' mode made csv.writer raise TypeError.
        with open(fname_base + '_' + event_pair.name + '_data.csv',
                  'w', newline='') as out_file:
            csv_writer = csv.writer(out_file)
            for dat in lat_table:
                csv_writer.writerow([dat.start_timestamp, dat.latency])
+
+
def write_summary(fname_base, event_pairs_by_pair_id, lat_tables_by_pair_id):
    """Write a percentile table and summary stats per event pair.

    Output goes to '<fname_base>_<pair>_summary.txt': a CSV percentile
    table (1..100) followed by min/max/average/median/90%ile/95%ile lines.

    Args:
        fname_base: str, prefix (may include a directory) for output files.
        event_pairs_by_pair_id: dict mapping pair id to EventPair.
        lat_tables_by_pair_id: dict mapping pair id to list of LatencyEntry.
    """
    summaries = get_summaries(event_pairs_by_pair_id, lat_tables_by_pair_id)
    for event_id, lat_table in lat_tables_by_pair_id.items():
        event_pair = event_pairs_by_pair_id[event_id]
        summary = summaries[event_pair.name]
        latencies = summary['latencies']
        num_latencies = summary['num_latencies']
        # Text mode ('w', newline='') — csv.writer and print() both write
        # str in Python 3; the original 'wb' mode raised TypeError.
        with open(fname_base + '_' + event_pair.name + '_summary.txt',
                  'w', newline='') as out_file:
            csv_writer = csv.writer(out_file)
            csv_writer.writerow(['Percentile', 'Latency'])
            # Write percentile table
            for percentile in range(1, 101):
                ind = percentile_to_index(num_latencies, percentile)
                csv_writer.writerow([percentile, latencies[ind]])

            # Write summary
            print('\n\nTotal number of samples = {}'.format(num_latencies),
                  file=out_file)
            print('Min = {}'.format(summary['min_lat']), file=out_file)
            print('Max = {}'.format(summary['max_lat']), file=out_file)
            print('Average = {}'.format(summary['average_lat']), file=out_file)
            # get_summaries stores the median under 'median'; the original
            # lookup of 'median_lat' raised KeyError.
            print('Median = {}'.format(summary['median']), file=out_file)
            print('90 %ile = {}'.format(summary['90pctile']), file=out_file)
            print('95 %ile = {}'.format(summary['95pctile']), file=out_file)
+
+
def process_latencies(config_xml, input_file):
    """
    End to end function to compute latencies and summaries from input file.
    Writes latency results to files in current directory.

    Arguments:
       config_xml: xml file specifying which event pairs to compute latency
                   btwn.
       input_file: text file containing the timestamped events, like a log file.
    """
    # Parse the event configuration file
    (event_pairs_by_pair_id, event_pairs_by_start_id,
     event_pairs_by_end_id) = parse_xml(config_xml)
    # Compute latencies
    lat_tables_by_pair_id = compute_latencies(input_file,
                                              event_pairs_by_start_id,
                                              event_pairs_by_end_id)
    # Output files are named after the input file's base name, e.g. a log
    # named 'run1.txt' yields 'run1_<pair>_data.csv' and
    # 'run1_<pair>_summary.txt' in the current working directory.
    fname_base = os.path.splitext(os.path.basename(input_file.name))[0]
    # Write the latency data and summary to respective files
    write_data(fname_base, event_pairs_by_pair_id, lat_tables_by_pair_id)
    write_summary(fname_base, event_pairs_by_pair_id, lat_tables_by_pair_id)
+
+
def get_summaries(event_pairs_by_pair_id, lat_tables_by_pair_id):
    """
    Process significant summaries from a table of latencies.

    Arguments:
      event_pairs_by_pair_id: dict mapping event id to event pair object
      lat_tables_by_pair_id: dict mapping event id to latency table
    Returns:
      summaries: dict mapping event pair name to a dict with keys:
        'latencies' (sorted list), 'num_latencies', 'min_lat', 'max_lat',
        'average_lat', 'median' / 'median_lat', '90pctile', '95pctile'.
    """
    summaries = {}
    for event_id, lat_table in lat_tables_by_pair_id.items():
        event_summary = {}
        event_pair = event_pairs_by_pair_id[event_id]
        latencies = sorted(entry.latency for entry in lat_table)
        event_summary['latencies'] = latencies
        event_summary['num_latencies'] = len(latencies)
        event_summary['min_lat'] = latencies[0]
        event_summary['max_lat'] = latencies[-1]
        event_summary['average_lat'] = sum(latencies) / len(latencies)
        # Store the median under both keys: 'median' (historical) and
        # 'median_lat' (matches the other *_lat names and the key that
        # write_summary reads).
        event_summary['median'] = latencies[len(latencies) // 2]
        event_summary['median_lat'] = event_summary['median']
        event_summary['90pctile'] = latencies[percentile_to_index(
            len(latencies), 90)]
        event_summary['95pctile'] = latencies[percentile_to_index(
            len(latencies), 95)]
        summaries[event_pair.name] = event_summary
    return summaries
+
+
def get_summaries_from_log(input_file_name, config_xml=None):
    """
    End to end function to compute latencies and summaries from input file.
    Returns a summary dictionary.

    Arguments:
      input_file_name: text file containing the timestamped events, like a
                       log file.
      config_xml: xml file specifying which event pairs to compute latency
                  btwn. Defaults to latency.xml next to this module.
    Returns:
      summaries: dict mapping event pair name to significant summary metrics.
    """
    config_xml = config_xml or os.path.join(os.path.dirname(__file__),
                                            'latency.xml')
    (event_pairs_by_pair_id, event_pairs_by_start_id,
     event_pairs_by_end_id) = parse_xml(config_xml)
    # Compute latencies; the with-block closes the log file when done
    # (the original leaked the open file handle).
    with open(input_file_name, 'r') as input_file:
        lat_tables_by_pair_id = compute_latencies(input_file,
                                                  event_pairs_by_start_id,
                                                  event_pairs_by_end_id)
    return get_summaries(event_pairs_by_pair_id, lat_tables_by_pair_id)
+
+
if __name__ == '__main__':
    # Parse command-line arguments
    parser = argparse.ArgumentParser(
        description='Processes profiling data to output latency numbers')
    parser.add_argument(
        '--events-config',
        type=argparse.FileType('r'),
        default=os.path.join(os.path.dirname(__file__), 'latency.xml'),
        help='The configuration XML file for events.'
             ' If not specified uses latency.xml from current folder')
    parser.add_argument(
        'input', type=argparse.FileType('r'), help='The input log')
    args = parser.parse_args()
    # Compute latencies and write per-pair data/summary files into the
    # current working directory.
    process_latencies(args.events_config, args.input)
diff --git a/acts/framework/acts/controllers/buds_lib/latency.xml b/acts/framework/acts/controllers/buds_lib/latency.xml
new file mode 100644
index 0000000..320979b
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/latency.xml
@@ -0,0 +1,22 @@
<?xml version="1.0"?>
<root>
    <!-- Profiling events. Each id must match a PROF:<event-id> value in the
         device log; each name is used to build the output file names. -->
    <event id="1" name="Button Down" />
    <event id="3" name="Play/Pause Button Event" />
    <event id="4" name="A2DP Start Ind" />
    <event id="6" name="A2DP Start Streaming" />
    <event id="9" name="AVRCP Play Notification" />
    <event id="10" name="AVRCP Pause Notification" />
    <event id="12" name="Voice Cmd Btn Held" />
    <event id="13" name="Voice Cmd Btn Released" />

    <!-- Event pairs that we are interested in measuring the latency of -->
    <event-pair start-event="1" end-event="3" />
    <event-pair start-event="1" end-event="12" />
    <event-pair start-event="3" end-event="9" />
    <event-pair start-event="9" end-event="6" />
    <event-pair start-event="1" end-event="6" />
    <event-pair start-event="3" end-event="10" />
    <event-pair start-event="1" end-event="10" />
    <event-pair start-event="12" end-event="13" />
    <event-pair start-event="13" end-event="6" />
</root>
diff --git a/acts/framework/acts/controllers/buds_lib/logserial.py b/acts/framework/acts/controllers/buds_lib/logserial.py
new file mode 100644
index 0000000..49b750a
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/logserial.py
@@ -0,0 +1,415 @@
+#!/usr/bin/env python3
+#
+#   Copyright 2018 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+import os
+import re
+import select
+import subprocess
+import sys
+import time
+import uuid
+from threading import Thread
+
+import serial
+from serial.tools import list_ports
+
+from acts import tracelogger
+from logging import Logger
+import logging as py_logging
+
+logging = tracelogger.TakoTraceLogger(Logger(__file__))
+
+RETRIES = 0
+
+
class LogSerialException(Exception):
    """LogSerial Exception.

    Raised when a requested serial port does not exist or when the
    configured output path is invalid.
    """
+
+
class PortCheck(object):
    """Helpers to enumerate serial ports and inspect their properties."""

    def get_serial_ports(self):
        """Gets the computer available serial ports.

        Returns:
            Dictionary mapping port name to (description, address).
        """
        result = {}
        for port_name, description, address in list_ports.comports():
            result[port_name] = (description, address)
        return result

    def search_port_by_property(self, search_params):
        """Search ports by a dictionary of the search parameters.

        Args:
            search_params: Dictionary object with the parameters
                           to search. i.e:
                           {'ID_SERIAL_SHORT':'213213',
                           'ID_USB_INTERFACE_NUM': '01'}
        Returns:
            Array with the ports found
        """
        ports_result = []
        for port in self.get_serial_ports():
            properties = self.get_port_properties(port=port)
            # A port matches when every requested property is present with
            # the expected value. This replaces the original int/bool flag
            # arithmetic (see the old TODO) with an equivalent all().
            if properties and all(
                    name in properties and properties[name] == value
                    for name, value in search_params.items()):
                ports_result.append(port)
        return ports_result

    def get_port_properties(self, port):
        """Get all the properties from a given port.

        Args:
            port: String object with the port name. i.e. '/dev/ttyACM1'

        Returns:
            Dictionary with all the properties, or None if the port is
            not present.
        """
        ports = self.get_serial_ports()
        if port not in ports:
            return None
        result = {}
        port_address = ports[port][1]
        if sys.platform.startswith('linux') or sys.platform.startswith(
                'cygwin'):
            property_list = None
            try:
                # udevadm prints one KEY=VALUE property per line.
                command = 'udevadm info -q property -n {}'.format(port)
                property_list = subprocess.check_output(command, shell=True)
                property_list = property_list.decode(errors='replace')
            except subprocess.CalledProcessError as error:
                logging.error(error)
            if property_list:
                for prop in filter(None, property_list.split('\n')):
                    # Split on the first '=' only, so values containing '='
                    # survive intact (the original indexed split() output,
                    # truncating such values).
                    key, _, value = prop.partition('=')
                    result[key] = value
        elif sys.platform.startswith('win'):
            # Parse the pyserial hwid string, e.g.
            # 'USB VID:PID=18D1:2D05 SER=... LOCATION=1-2.1'
            regex = (r'(?P<type>[A-Z]*)\sVID\:PID\=(?P<vid>\w*)'
                     r'\:(?P<pid>\w*)\s+(?P<adprop>.*$)')
            m = re.search(regex, port_address)
            if m:
                result['type'] = m.group('type')
                result['vid'] = m.group('vid')
                result['pid'] = m.group('pid')
                adprop = m.group('adprop').strip()
                if adprop:
                    for prop in adprop.split(' '):
                        key, _, value = prop.partition('=')
                        result[key] = value
                if 'LOCATION' in result:
                    # Zero-pad the interface number to two digits to match
                    # the udev ID_USB_INTERFACE_NUM format.
                    interface = int(result['LOCATION'].split('.')[1])
                    result['ID_USB_INTERFACE_NUM'] = '{:02d}'.format(
                        interface)
                win_vid_pid = '*VID_{}*PID_{}*'.format(result['vid'],
                                                       result['pid'])
                command = (
                        'powershell gwmi "Win32_USBControllerDevice |' +
                        ' %{[wmi]($_.Dependent)} |' +
                        ' Where-Object -Property PNPDeviceID -Like "' +
                        win_vid_pid + '" |' +
                        ' Where-Object -Property Service -Eq "usbccgp" |' +
                        ' Select-Object -Property PNPDeviceID"')
                res = subprocess.check_output(command, shell=True)
                r = res.decode('ascii')
                m = re.search('USB\\\\.*', r)
                if m:
                    result['ID_SERIAL_SHORT'] = (
                        m.group().strip().split('\\')[2])
        return result

    def port_exists(self, port):
        """Check if a serial port exists in the computer by the port name.

        Args:
            port: String object with the port name. i.e. '/dev/ttyACM1'

        Returns:
            True if it was found, False if not.
        """
        return port in self.get_serial_ports()
+
+
class LogSerial(object):
    """Serial-port logger.

    Opens a serial connection and collects every received line on a
    background thread into ``self.log`` as [epoch_timestamp, text] pairs;
    the log can later be flushed to a '<uuid>_serial.log' file.
    """

    def __init__(self,
                 port,
                 baudrate,
                 bytesize=8,
                 parity='N',
                 stopbits=1,
                 timeout=0.15,
                 retries=0,
                 flush_output=True,
                 terminator='\n',
                 output_path=None,
                 serial_logger=None):
        """Configures the connection, opens it, and starts the read thread.

        Args:
            port: String object with the port name. i.e. '/dev/ttyACM1'
            baudrate: serial connection baud rate.
            bytesize: serial byte size (default 8).
            parity: serial parity setting (default 'N').
            stopbits: serial stop bits (default 1).
            timeout: serial read timeout in seconds.
            retries: stored into the module-level RETRIES; only consumed by
                the commented-out @retry decorator on open().
            flush_output: if True, flush serial input right after opening.
            terminator: string appended to every command sent by write().
            output_path: directory where flush_log() writes the log file.
            serial_logger: optional logger that replaces the module logger.

        Raises:
            LogSerialException: if the port does not exist.
        """
        global RETRIES
        self.set_log = False
        self.output_path = None
        self.set_output_path(output_path)
        if serial_logger:
            self.set_logger(serial_logger)
        self.monitor_port = PortCheck()
        if self.monitor_port.port_exists(port=port):
            self.connection_handle = serial.Serial()
            RETRIES = retries
            self.reading = True
            self.log = []  # collected [epoch_timestamp, line] entries
            self.log_thread = Thread()
            self.command_ini_index = None  # log index of the last command
            self.is_logging = False
            self.flush_output = flush_output
            self.terminator = terminator
            # Only override pyserial defaults for settings that are truthy.
            if port:
                self.connection_handle.port = port
            if baudrate:
                self.connection_handle.baudrate = baudrate
            if bytesize:
                self.connection_handle.bytesize = bytesize
            if parity:
                self.connection_handle.parity = parity
            if stopbits:
                self.connection_handle.stopbits = stopbits
            if timeout:
                self.connection_handle.timeout = timeout
            try:
                self.open()
            except Exception as e:
                # Best-effort: log the failure instead of propagating so the
                # object is still constructed.
                self.close()
                logging.error(e)
        else:
            raise LogSerialException(
                'The port {} does not exist'.format(port))

    def set_logger(self, serial_logger):
        """Replace the module-level logger with the caller's logger.

        NOTE(review): this rebinds the module global ``logging``, so it
        affects every LogSerial instance in this process.
        """
        global logging
        logging = serial_logger
        # ``log_path`` is not a standard attribute of the stdlib logging
        # module — presumably injected by the ACTS framework at runtime.
        # TODO: confirm where log_path is set.
        logger_path = py_logging.log_path
        if self.output_path != logger_path:
            logging.info(
                'Logserial output path changed to: {}'.format(logger_path))
        self.output_path = logger_path
        self.set_log = True

    def set_output_path(self, output_path):
        """Set the output path for the flushed log.

        Args:
            output_path: String object with the path

        Raises:
            LogSerialException: if the path does not exist.
        """
        if output_path:
            if os.path.exists(output_path):
                self.output_path = output_path
            else:
                raise LogSerialException('The output path does not exist.')

    def refresh_port_connection(self, port):
        """Will update the port connection without closing the read thread.

        Args:
            port: String object with the new port name. i.e. '/dev/ttyACM1'

        Raises:
            LogSerialException if the port is not alive.
        """
        if self.monitor_port.port_exists(port=port):
            self.connection_handle.port = port
            self.open()
        else:
            raise LogSerialException(
                'The port {} does not exist'.format(port))

    def is_port_alive(self):
        """Verify if the current port is alive in the computer.

        Returns:
            True if its alive, False if its missing.
        """
        alive = self.monitor_port.port_exists(port=self.connection_handle.port)
        return alive

    # @retry(Exception, tries=RETRIES, delay=1, backoff=2)
    def open(self):
        """Will open the connection with the current port settings."""
        # Make sure any previous handle is fully closed before reopening.
        while self.connection_handle.isOpen():
            self.connection_handle.close()
            time.sleep(0.5)
        self.connection_handle.open()
        if self.flush_output:
            self.flush()
        self.start_reading()
        logging.info('Connection Open')

    def close(self):
        """Will close the connection and the read thread."""
        self.stop_reading()
        if self.connection_handle:
            self.connection_handle.close()
        # flush_log() is only called on the default module logger (a
        # TakoTraceLogger); a caller-supplied logger is left untouched.
        if not self.set_log:
            logging.flush_log()
        self.flush_log()
        logging.info('Connection Closed')

    def flush(self):
        """Will flush any input from the serial connection."""
        self.write('\n')
        self.connection_handle.flushInput()
        self.connection_handle.flush()
        flushed = 0
        while True:
            # Poll (timeout 0) the port for pending bytes or errors.
            ready_r, _, ready_x = (select.select([self.connection_handle], [],
                                                 [self.connection_handle], 0))
            if ready_x:
                logging.exception('exception from serial port')
                return
            elif ready_r:
                flushed += 1
                # This may cause underlying buffering.
                self.connection_handle.read(1)
                # Flush the underlying buffer too.
                self.connection_handle.flush()
            else:
                break
            # NOTE(review): this log statement sits inside the loop, so it
            # fires once per drained byte; it looks like it was meant to run
            # after the loop — confirm before moving it.
            if flushed > 0:
                logging.debug('dropped >{} bytes'.format(flushed))

    def write(self, command, wait_time=0.2):
        """Will write into the serial connection.

        Args:
            command: String object with the text to write.
            wait_time: Float object with the seconds to wait after the
                       command was issued.
        """
        if command:
            if self.terminator:
                command += self.terminator
            # Remember where the command started so read() can return only
            # the output produced after it.
            self.command_ini_index = len(self.log)
            self.connection_handle.write(command.encode())
            if wait_time:
                time.sleep(wait_time)
            logging.info('cmd [{}] sent.'.format(command.strip()))

    def flush_log(self):
        """Will output the log into a CSV file."""
        if len(self.log) > 0:
            path = ''
            # Fall back to the current working directory if no (valid)
            # output path was configured.
            if not self.output_path:
                self.output_path = os.getcwd()
            elif not os.path.exists(self.output_path):
                self.output_path = os.getcwd()
            path = os.path.join(self.output_path,
                                str(uuid.uuid4()) + '_serial.log')
            with open(path, 'a') as log_file:
                for info in self.log:
                    log_file.write('{}, {}\n'.format(info[0], info[1]))

    def read(self):
        """Will read from the log the output from the serial connection
        after a write command was issued. It will take the initial time
        of the command as a reference.

        Returns:
            Array object with the log lines.
        """
        buf_read = []
        command_end_index = len(self.log)
        info = self.query_serial_log(self.command_ini_index, command_end_index)
        for line in info:
            buf_read.append(line[1])
        # Advance the marker so the next read() starts after this output.
        self.command_ini_index = command_end_index
        return buf_read

    def get_all_log(self):
        """Gets the log object that collects the logs.

        Returns:
            List of [timestamp, line] entries with all the logs.
        """
        return self.log

    def query_serial_log(self, from_index, to_index):
        """Will query a slice of the collected session log.

        Args:
            from_index: int, log index to start the slice at (inclusive).
            to_index: int, log index to end the slice at (exclusive).

        Returns:
            List of [timestamp, line] entries, or None if the range is
            empty or inverted.
        """
        if from_index < to_index:
            info = self.log[from_index:to_index]
            return info

    def _start_reading_thread(self):
        """Reader loop: collect decoded lines into self.log until stopped."""
        if self.connection_handle.isOpen():
            self.reading = True
            while self.reading:
                try:
                    data = self.connection_handle.readline().decode('utf-8')
                    if data:
                        self.is_logging = True
                        # NOTE(review): these two replace() results are
                        # discarded (str.replace returns a new string), and
                        # '/n'/'/r' look like typos for '\n'/'\r'; the
                        # strip() below removes trailing newlines anyway.
                        data.replace('/n', '')
                        data.replace('/r', '')
                        data = data.strip()
                        self.log.append([time.time(), data])
                    else:
                        self.is_logging = False
                except Exception:
                    # Swallow transient read/decode errors and retry after
                    # a short pause.
                    time.sleep(1)
            logging.info('Read thread closed')

    def start_reading(self):
        """Method to start the log collection."""
        # NOTE(review): Thread.isAlive() was removed in Python 3.9;
        # is_alive() is the modern spelling.
        if not self.log_thread.isAlive():
            self.log_thread = Thread(target=self._start_reading_thread, args=())
            # Daemon thread so a hung read never blocks interpreter exit.
            self.log_thread.daemon = True
            try:
                self.log_thread.start()
            except(KeyboardInterrupt, SystemExit):
                self.close()
        else:
            logging.warning('Not running log thread, is already alive')

    def stop_reading(self):
        """Method to stop the log collection."""
        self.reading = False
        # Wait (up to 10 minutes) for the reader loop to observe the flag.
        self.log_thread.join(timeout=600)
diff --git a/acts/framework/acts/controllers/buds_lib/test_actions/__init__.py b/acts/framework/acts/controllers/buds_lib/test_actions/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/test_actions/__init__.py
diff --git a/acts/framework/acts/controllers/buds_lib/test_actions/agsa_acts.py b/acts/framework/acts/controllers/buds_lib/test_actions/agsa_acts.py
new file mode 100644
index 0000000..0141795
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/test_actions/agsa_acts.py
@@ -0,0 +1,153 @@
+#!/usr/bin/env python3
+#
+#   Copyright 2018 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+import datetime
+import re
+
+from acts.controllers.adb import AdbError
+from acts.controllers.buds_lib.test_actions.base_test_actions import BaseTestAction
+from acts.controllers.buds_lib.test_actions.base_test_actions import timed_action
+
# Default on-phone location where AGSA looks for the staged DFU image.
PHONE_DFU_PATH = ('/storage/emulated/0/Android/data/com.google.android'
                  '.googlequicksearchbox/files/download_cache/apollo.dfu')

# Intent template that kicks off the AGSA OTA flow. Formatted with the DFU
# path on the phone and a literal 'true'/'false' force flag.
AGSA_BROADCAST = (
    'am  broadcast -a \'action_ota\' --es dfu_url %s --es build_label 9.9.9 '
    '--ez is_force %s com.google.android.googlequicksearchbox/com.google'
    '.android.apps.gsa.broadcastreceiver.CommonBroadcastReceiver')
+
+
class AgsaOTAError(Exception):
    """Raised when the AGSA-driven OTA flow fails or times out."""
+
+
class AgsaTestActions(BaseTestAction):
    """AGSA test action library.

    Drives AGSA-initiated OTA transfers and AGSA installation on the
    companion Android phone.
    """

    def __init__(self, android_dev, logger=None):
        """
        Simple init code to keep the android object for future reference.
        Args:
           android_dev: devcontrollers.android_device.AndroidDevice
        """
        super(AgsaTestActions, self).__init__(logger)
        self.dut = android_dev

    @timed_action
    def _initiate_agsa_ota(self, file_path, destination=None, force=True):
        """Pushes the dfu file to phone and issues broadcast to start AGSA OTA

        Args:
            file_path: (string) path of dfu file
            destination: (string) destination path on the phone uses
                         $PHONE_DFU_PATH if not specified
            force: (bool) option to force the issued broadcast?

        Returns:
            bool, True if the push succeeded and AGSA accepted the broadcast,
            False otherwise.
        """
        if not destination:
            destination = PHONE_DFU_PATH
        if not self.dut.push_file_to_phone(file_path, destination):
            # Bug fix: a failed push used to fall through and return None
            # silently.
            self.logger.error('Failed to push the dfu file to the phone.')
            return False
        # The broadcast expects literal lowercase 'true'/'false' strings.
        command = AGSA_BROADCAST % (destination, 'true' if force else 'false')
        output = self.dut.adb.shell(command.split())
        if 'result=0' in output:
            self.logger.info('Agsa broadcast successful!')
            return True
        self.logger.error('Agsa broadcast failed')
        return False

    @timed_action
    def _wait_for_ota_to_finish(self, timeout=660):
        """Logcat is continuously read to keep track of the OTA transfer

        Args:
           timeout: (int) time to wait before timing out.

        Returns:
            True on success

        Raises: AgsaOTAError if the timeout is reached.
        """
        # regex that confirms completion
        transfer_completion_match = \
            re.compile('OTA progress: 100 %|OTA img rcvd')
        # time now + $timeout
        expiry_time = datetime.datetime.now() + \
                      datetime.timedelta(seconds=timeout)
        self.logger.info('Waiting for OTA transfer to complete....')
        while True:
            # Look back a few seconds so a completion message landing
            # between polls is not missed. (The old name 'now_plus_minute'
            # and its comment were both wrong: this is now minus 5 seconds.)
            lookback_time = datetime.datetime.now() - \
                            datetime.timedelta(seconds=5)
            try:
                # grep logcat for 'Devicelog:'
                filtered_log = self.dut.logcat_filter_message(
                    lookback_time.strftime('%m-%d %H:%M:%S.000'),
                    'Devicelog:')
                if filtered_log and \
                        transfer_completion_match.search(filtered_log):
                    self.logger.info('Transfer completed!')
                    break
            except AdbError:
                # gets thrown if no matching string is found
                pass
            if datetime.datetime.now() > expiry_time:
                self.logger.error('Timed out waiting for OTA to complete.')
                raise AgsaOTAError('Timed out waiting for OTA to complete.')
        return True

    @timed_action
    def initiate_agsa_and_wait_until_transfer(self, file_path, destination=None,
                                              force=True, timeout=660):
        """Calls _initiate_agsa_ota and _wait_for_ota_to_finish

        Returns:
            True on success and False otherwise
        """
        # Bug fix: the broadcast result used to be ignored, so a failed
        # broadcast still blocked the full timeout waiting for a transfer
        # that would never start.
        if not self._initiate_agsa_ota(file_path, destination, force):
            return False
        return self._wait_for_ota_to_finish(timeout)

    @timed_action
    def install_agsa(self, version, force=False):
        """
        Installs the specified version of AGSA if different from the one
        currently installed, unless force is set to True.

        Args:
            version: (string) ex: '7.14.21.release'
            force: (bool) installs only if currently installed version is
                   different than the one to be installed. True installs
                   by-passing version check
        Return:
            True on Success and False otherwise
        """
        # get currently installed version, and install agsa only if different
        # from what is requested
        current_version = self.dut.get_agsa_version()
        if force or (version.replace('alpha', '').replace('release', '')
                     not in current_version):
            self.logger.info('Current AGSA version is %s' % current_version)
            self.logger.info('Installing AGSA version %s...' % version)
            # Bug fix: this used self.and_actions, an attribute never set on
            # this class (only self.dut is assigned in __init__); it raised
            # AttributeError on every call.
            if self.dut.install_agsa(version):
                self.logger.info('Install success!')
                return True
            self.logger.error('Failed to install version %s' % version)
            return False
        # Requested version already present: treat as success.
        self.logger.info('AGSA version %s already installed.' % version)
        return True
diff --git a/acts/framework/acts/controllers/buds_lib/test_actions/apollo_acts.py b/acts/framework/acts/controllers/buds_lib/test_actions/apollo_acts.py
new file mode 100644
index 0000000..afb4fa8
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/test_actions/apollo_acts.py
@@ -0,0 +1,617 @@
+#!/usr/bin/env python3
+#
+#   Copyright 2018 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+"""
+A comprehensive interface for performing test actions on an Apollo device.
+"""
+
+import time
+
+from acts.controllers.buds_lib.apollo_lib import DeviceError
+from acts.controllers.buds_lib.test_actions.agsa_acts import AgsaOTAError
+from acts.controllers.buds_lib.test_actions.base_test_actions import BaseTestAction
+from acts.controllers.buds_lib.test_actions.base_test_actions import timed_action
+from acts.controllers.buds_lib.test_actions.bt_utils import BTUtils
+from acts.libs.utils.timer import TimeRecorder
+from acts.test_utils.tel.tel_test_utils import initiate_call
+from acts.test_utils.tel.tel_test_utils import wait_for_droid_in_call
+from acts.utils import wait_until
+
# Package names used when interacting with phone-side apps and services.
PACKAGE_NAME_AGSA = 'com.google.android.googlequicksearchbox'
PACKAGE_NAME_GMS = 'com.google.android.gms'
PACKAGE_NAME_NEARBY = 'com.google.android.gms.policy_nearby'
PACKAGE_NAME_SETTINGS = 'com.android.settings'
# UI strings shown on the phone during the magic-pairing (MP) flow.
BISTO_MP_DETECT_HEADER = 'Pixel Buds'
BISTO_MP_DEVICE_TEXT = 'Pixel Buds'
BISTO_MP_DETECT_TEXT = BISTO_MP_DETECT_HEADER + BISTO_MP_DEVICE_TEXT
BISTO_MP_CANCEL_TEXT = 'CANCEL'
BISTO_MP_CONNECT_TEXT = 'TAP TO CONNECT'
BISTO_MP_CONNECT_FAIL_TEXT = 'Can\'t connect to'
BISTO_MP_CONNECT_RETRY_TEXT = 'TRY AGAIN'
BISTO_MP_CONNECTED_TEXT = 'Now set up your Google Assistant'
BISTO_MP_CONNECTED_EXIT_TEXT = 'NO THANKS'
BISTO_MP_EXIT_PROMPT_TEXT = 'Exit setup?'
BISTO_MP_EXIT_CONFIRM_TEXT = 'EXIT'
# Expected Apollo profile states; values are the literal 'TRUE'/'FALSE'
# strings reported by the device console.
PROFILES_CONNECTED = {
    'HFP(pri.)': 'TRUE',
    'A2DP(pri)': 'TRUE',
}
PROFILES_DISCONNECTED = {
    'HFP(pri.)': 'FALSE',
    'A2DP(pri)': 'FALSE',
}
COMP_PROFILE_CONNECTED = {'Comp': 'TRUE'}
COMP_PROFILE_DISCONNECTED = {'Comp': 'FALSE'}
AVRCPSTATUS = 'AvrcpPlayPause'
DEFAULT_TIMEOUT = 60  # wait 60 seconds max for bond/connect.
DEFAULT_CMD_INTERVAL = 0.5  # default interval between serial commands
DEFAULT_CMD_RETRY = 5  # default retry times when a command failed.
# Profile names as reported by the Apollo serial console.
DEFAULT_BT_PROFILES = [
    'HFP Pri', 'HFP Sec', 'A2DP Pri', 'A2DP Sec', 'CTRL', 'AUDIO', 'DEBUG',
    'TRANS'
]
# Status keys polled by the reconnect test.
DEFAULT_BT_STATUS = ['A2DP(pri)', 'HFP(pri.)', 'Comp']
+
+
class TestActsError(Exception):
    """Raised when an Apollo test action fails."""
+
+
class ApolloTestActions(BaseTestAction):
    """Test action class for all Apollo test actions."""

    def __init__(self, apollo_dev, logger=None):
        """
        Args:
             apollo_dev: apollo.lib.apollo_lib.Device the Apollo device
        """
        super(ApolloTestActions, self).__init__(logger)
        # Device under test (the Apollo earbuds controller).
        self.dut = apollo_dev
        # Embedded timer used for BT connection-time measurements.
        self.measurement_timer = TimeRecorder()
+
+    def bluetooth_get_status(self):
+        status = self.dut.get_bt_status()
+        self.logger.info(status)
+
+    def wait_for_bluetooth_disconnection(self, timeout=60):
+        """ Set pairing mode and disconnect.
+
+        This action will wait until the apollo profiles are false.
+
+        Args:
+             timeout: integer, timeout value in seconds.
+        """
+        result = True
+        apollo_status = self.dut.get_bt_status()
+        self.logger.info('Waiting for the disconnection.')
+        time.sleep(1)
+        ini_time = time.time()
+        while len(apollo_status) != len(
+            [s for s in apollo_status.values() if s == 'FALSE']):
+            apollo_status = self.dut.get_bt_status()
+            if (time.time() - ini_time) > timeout:
+                self.logger.warning('Timeout waiting for the disconnection.')
+                result = False
+            time.sleep(1)
+        return result
+
+    def pair(self, phone, companion_app=True):
+        """Pairs phone with apollo and validates bluetooth profiles.
+
+        Args:
+            phone: android phone
+            apollo: apollo device
+            companion_app (optional): True if the phone has a companion app
+                                      installed. False otherwise.
+
+        Raises:
+            TestActsError: Bluetooth pairing failed/ Dut BT status check failed.
+        """
+        bt_util = BTUtils()
+        target_addr = self.dut.bluetooth_address
+        if bt_util.android_device_in_connected_state(phone, target_addr):
+            self.logger.info('Already paired and connected, skipping pairing.')
+        else:
+            if bt_util.android_device_in_paired_state(phone, target_addr):
+                self.logger.info(
+                    'Device is paired but not connected, unpair first.')
+                if not bt_util.bt_unpair(phone, self.dut):
+                    raise TestActsError('Unable to unpair the device')
+            result, _ = bt_util.bt_pair_and_connect(phone, self.dut)
+            if not result:
+                raise TestActsError('Bluetooth pairing failed.')
+            self.logger.info('DEVICE PAIRED')
+            if companion_app:
+                profiles = PROFILES_CONNECTED.copy()
+                profiles.update(COMP_PROFILE_CONNECTED)
+            else:
+                profiles = PROFILES_CONNECTED
+            self.logger.info(profiles)
+            if not bt_util.check_device_bt(device=self.dut, profiles=profiles):
+                raise TestActsError('Dut BT status check failed.')
+            else:
+                return True
+
+    def unpair(self, phone, companion_app=True, factory_reset_dut=True):
+        """Unpairs phone from apollo and validates bluetooth profiles.
+
+        Args:
+            phone: android phone
+            apollo: apollo device
+            companion_app (optional): True if the phone has a companion app
+                                      installed. False otherwise.
+
+        Raises:
+            TestActsError: Bluetooth unpairing/Dut BT status check failed.
+        """
+        bt_util = BTUtils()
+        target_addr = self.dut.bluetooth_address
+        if not bt_util.android_device_in_paired_state(phone, target_addr):
+            self.logger.info('Device is already unpaired, skipping unpairing.')
+        else:
+            result = bt_util.bt_unpair(
+                phone, self.dut, factory_reset_dut=factory_reset_dut)
+            if not result:
+                raise TestActsError('Bluetooth unpairing failed.')
+            if companion_app:
+                profiles = PROFILES_DISCONNECTED.copy()
+                profiles.update(COMP_PROFILE_DISCONNECTED)
+            else:
+                profiles = PROFILES_DISCONNECTED
+            if not bt_util.check_device_bt(device=self.dut, profiles=profiles):
+                raise TestActsError('Dut BT status check failed.')
+            else:
+                return True
+
+    def is_paired(self, phone):
+        """Check if the given apollo is paired with the android device.
+
+        Args:
+            phone: android phone
+            apollo: apollo device
+
+        Returns:
+            Bool: True if apollo is paired with the phone.
+        """
+        bt_util = BTUtils()
+        target_addr = self.dut.bluetooth_address
+        return bt_util.android_device_in_paired_state(phone, target_addr)
+
    def send_music_play_event_and_validate(self):
        """Send the play event on Apollo and validate the response and DSP
        Status.

        Raises:
            TestActsError: Error while playing the music.
        """
        # Seconds to wait for A2DP streaming to start after the play event.
        play_detection_timeout = 1
        if self.dut.is_streaming():
            self.logger.info('Music already streaming. Skipping play event..')
            return
        self.logger.info('Playing video...')
        # Issue the AVRCP play/pause event and look for the 'play' ack in the
        # device log.
        is_played = self.dut.music_control_events(
            AVRCPSTATUS, self.dut.apollo_log_regex.AVRCP_PLAY_REGEX)
        if not is_played:
            self.logger.error('AVRCP Played status not found')
            raise TestActsError('AVRCP Played status not found.')
        # Poll until the DSP reports an active A2DP stream.
        wait_until(
            lambda: self.dut.is_streaming(),
            play_detection_timeout,
            sleep_s=0.25)
        if not self.dut.is_streaming():
            self.logger.error('Device is NOT in a deviceA2DPStreaming state')
            raise TestActsError(
                'Device is NOT in a deviceA2DPStreaming state.')
+
+    def send_music_pause_event_and_validate(self):
+        """Send the pause event on Apollo and validate the responses and DSP
+        Status.
+
+        Raises:
+            TestActsError: Error while pausing the music.
+        """
+        paused_detection_timeout = 10
+        if not self.dut.is_streaming():
+            self.logger.info('Music not streaming. Skipping pause event..')
+            return
+        self.logger.info("Pausing video...")
+        is_paused = self.dut.music_control_events(
+            AVRCPSTATUS, self.dut.apollo_log_regex.AVRCP_PAUSE_REGEX)
+        if not is_paused:
+            self.logger.error('AVRCP Paused statue not found')
+            raise TestActsError('AVRCP Paused status not found.')
+        wait_until(
+            lambda: not self.dut.is_streaming(),
+            paused_detection_timeout,
+            sleep_s=0.25)
+        if self.dut.is_streaming():
+            self.logger.error('Device is still in deviceA2DPStreaming state')
+            raise TestActsError(
+                'Device is still in deviceA2DPStreaming state.')
+
+    def vol_down_and_validate(self):
+        """Send volume down twice and validate by comparing two levels
+
+        Raises:
+            TestActsError: Error
+        """
+        self.logger.info('Decreasing volume')
+        before_vol = self.dut.volume('Down', 1)
+        time.sleep(2)
+        after_vol = self.dut.volume('Down', 1)
+        if not after_vol or not before_vol or after_vol >= before_vol:
+            self.logger.error(
+                'Unable to decrease the volume. Before: %s. After: %s' %
+                (before_vol, after_vol))
+            raise TestActsError('error decreasing volume')
+
+    def vol_up_and_validate(self):
+        """Send volume up twice and validate by comparing two levels
+
+        Raises:
+            TestActsError: Error
+        """
+        self.logger.info('Increasing volume')
+        before_vol = self.dut.volume('Up', 1)
+        time.sleep(2)
+        after_vol = self.dut.volume('Up', 1)
+        if not after_vol or not before_vol or after_vol <= before_vol:
+            self.logger.error(
+                'Unable to increase the volume. Before: %s. After: %s' %
+                (before_vol, after_vol))
+            raise TestActsError('error increasing volume')
+
+    def call_and_validate_ringing(self,
+                                  calling_phone,
+                                  number_to_call,
+                                  call_retries=10):
+        for i in range(call_retries):
+            initiate_call(self.logger, calling_phone, number_to_call)
+            is_calling = wait_for_droid_in_call(
+                self.logger, calling_phone, max_time=10)
+            if is_calling:
+                self.logger.info('Call initiated!')
+                break
+            else:
+                self.logger.warning('Call is not initiating.')
+                if i == call_retries:
+                    self.logger.error('Call initiation retries exhausted')
+                    raise TestActsError(
+                        '%s retries failed to initiate the call' %
+                        (call_retries))
+            self.logger.warning('Retrying call...')
+        # wait for offhook state and return
+        wait_until(
+            (lambda: calling_phone.droid.telecomGetCallState() == 'OFFHOOK'),
+            timeout_s=40,
+            condition=True,
+            sleep_s=.5)
+        self.logger.info('Phone call initiated on %s' % calling_phone.serial)
+
    def answer_phone_and_validate_call_received(self, receiving_phone):
        """Answer an incoming call on the Apollo and verify it connects.

        Assumes a call toward receiving_phone was initiated beforehand:
        waits for RINGING, taps the Apollo to accept, then waits for the
        phone to report OFFHOOK.

        Args:
            receiving_phone: android device object receiving the call.
        """
        # wait until the phone rings (assumes that a call is initiated prior to
        # running the command)
        wait_until(
            lambda: receiving_phone.droid.telecomGetCallState() == 'RINGING',
            timeout_s=40,
            condition=True,
            sleep_s=.5)
        self.logger.info('Ring detected on %s - now answering the call...' %
                         (receiving_phone.serial))
        # answer the phone call: a single tap on the Apollo accepts it.
        self.dut.tap()
        # wait until OFFHOOK state
        wait_until(
            lambda: receiving_phone.droid.telecomGetCallState() == 'OFFHOOK',
            timeout_s=40,
            condition=True,
            sleep_s=.5)
+
    def hangup_phone_and_validate_call_hung(self, receiving_phone):
        """End an active call from the Apollo and verify the phone goes IDLE.

        Args:
            receiving_phone: android device object engaged in the call.
        """
        # wait for phone to be in OFFHOOK state (assumed that a call is answered
        # and engaged)
        wait_until(
            lambda: receiving_phone.droid.telecomGetCallState() == 'OFFHOOK',
            timeout_s=40,
            condition=True,
            sleep_s=.5)
        # end the call (post and pre 1663 have different way of ending call)
        self.logger.info(
            'Hanging up the call on %s...' % receiving_phone.serial)
        if self.dut.version < 1663:
            # Older firmware: a single tap hangs up.
            self.dut.tap()
        else:
            # Newer firmware: a long hold hangs up.
            self.dut.hold(duration=100)
        # wait for idle state
        wait_until(
            lambda: receiving_phone.droid.telecomGetCallState() == 'IDLE',
            timeout_s=40,
            condition=True,
            sleep_s=.5)
+
+    @timed_action
+    def factory_reset(self):
+        ret = False
+        try:
+            self.dut.factory_reset()
+            ret = True
+        except DeviceError as ex:
+            self.logger.warning('Failed to reset Apollo: %s' % ex)
+        return ret
+
    @timed_action
    def wait_for_magic_pairing_notification(self, android_act, timeout=60):
        """Poll the phone UI until the DUT's magic-pairing prompt appears.

        Args:
            android_act: android test-action object owning the phone UI
                         (accessed through android_act.dut.ui_util).
            timeout: int, seconds to poll before giving up.

        Returns:
            bool, True if the DUT's prompt was detected within the timeout.
        """
        dut_detected = False
        start_time = time.time()
        self.logger.info('Waiting for MP prompt: %s' % BISTO_MP_DEVICE_TEXT)
        while not dut_detected:
            android_act.dut.ui_util.uia.wait.update()
            self.sleep(1)
            if android_act.dut.ui_util.uia(
                    textContains=BISTO_MP_DETECT_HEADER, enabled=True).exists:
                if android_act.dut.ui_util.uia(
                        textContains=BISTO_MP_DEVICE_TEXT,
                        enabled=True).exists:
                    self.logger.info('DUT Apollo MP prompt detected!')
                    dut_detected = True
                else:
                    # A prompt for some other device: dismiss it so the DUT's
                    # own prompt can surface, then keep polling.
                    self.logger.info(
                        'NONE DUT Apollo MP prompt detected! Cancel and RETRY!'
                    )
                    android_act.dut.ui_util.click_by_text(BISTO_MP_CANCEL_TEXT)
            if time.time() - start_time > timeout:
                break
        if not dut_detected:
            self.logger.info(
                'Failed to get %s MP prompt' % BISTO_MP_DEVICE_TEXT)
        return dut_detected
+
+    @timed_action
+    def start_magic_pairing(self, android_act, timeout=30, retries=3):
+        paired = False
+        android_act.dut.ui_util.click_by_text(
+            BISTO_MP_CONNECT_TEXT, timeout=timeout)
+        connect_start_time = time.time()
+        count = 0
+        timeout = 30
+
+        while not paired and count < retries:
+            android_act.dut.ui_util.uia.wait.update()
+            self.sleep(1)
+            if time.time() - connect_start_time > timeout:
+                self.logger.info('Time out! %s seconds' % time)
+                android_act.app_force_close_agsa()
+                self.logger.info('Timeout(s): %s' % timeout)
+                break
+            if android_act.dut.ui_util.uia(
+                    textContains=BISTO_MP_CONNECT_FAIL_TEXT,
+                    enabled=True).exists:
+                count += 1
+                self.logger.info('MP FAILED! Retry %s.' % count)
+                android_act.dut.ui_util.click_by_text(
+                    BISTO_MP_CONNECT_RETRY_TEXT)
+                connect_start_time = time.time()
+            elif android_act.dut.ui_util.uia(
+                    textContains=BISTO_MP_CONNECTED_TEXT, enabled=True).exists:
+                self.logger.info('MP SUCCESSFUL! Exiting AGSA...')
+                paired = True
+                android_act.dut.ui_util.click_by_text(
+                    BISTO_MP_CONNECTED_EXIT_TEXT)
+                android_act.dut.ui_util.wait_for_text(
+                    BISTO_MP_EXIT_PROMPT_TEXT)
+                android_act.dut.ui_util.click_by_text(
+                    BISTO_MP_EXIT_CONFIRM_TEXT)
+        return paired
+
    @timed_action
    def turn_bluetooth_on(self):
        """Power on the Apollo BT radio via the serial 'pow 1' command."""
        self.dut.cmd('pow 1')
        return True
+
    @timed_action
    def turn_bluetooth_off(self):
        """Power off the Apollo BT radio via the serial 'pow 0' command."""
        self.dut.cmd('pow 0')
        return True
+
    @timed_action
    def wait_for_bluetooth_a2dp_hfp(self,
                                    timeout=DEFAULT_TIMEOUT,
                                    interval=DEFAULT_CMD_INTERVAL):
        """Wait for BT connection by checking if A2DP and HFP connected.

        This is used for BT pair+connect test. Connection times are recorded
        in self.measurement_timer.

        Args:
            timeout: float, timeout value in second.
            interval: float, interval between polling BT profiles.

        Returns:
            bool, True if both profiles connected within the timeout.
        """
        # Need to check these two profiles
        pass_profiles = ['A2DP Pri', 'HFP Pri']
        ret = False
        try:
            ret = self._wait_for_bluetooth_profile_connection(
                pass_profiles, timeout, interval, self.measurement_timer)
        except DeviceError as ex:
            self.logger.warning('Failed to wait for BT connection: %s' % ex)
        return ret
+
+    def _wait_for_bluetooth_profile_connection(self, profiles_to_check,
+                                               timeout, interval, timer):
+        """A generic method to wait for specified BT profile connection.
+
+        Args:
+            profiles_to_check: list, profile names (A2DP, HFP, etc.) to be
+                               checked.
+            timeout: float, timeout value in second.
+            interval: float, interval between polling BT profiles.
+            timer: TimeRecorder, time recorder to save the connection time.
+
+        Returns:
+            bool, True if checked profiles are connected, False otherwise.
+        """
+        timer.start_timer(profiles_to_check, force=True)
+        start_time = time.time()
+        while time.time() - start_time < timeout:
+            profiles = self._bluetooth_check_profile_connection()
+            for profile in profiles:
+                if profiles[profile]:
+                    timer.stop_timer(profile)
+            # now check if the specified profile connected.
+            all_connected = True
+            for profile in profiles_to_check:
+                if not profiles[profile]:
+                    all_connected = False
+                    break
+            if all_connected:
+                return True
+            time.sleep(interval)
+        # make sure the profile timer are stopped.
+        timer.stop_timer(profiles_to_check)
+        return False
+
    def _bluetooth_check_profile_connection(self):
        """Poll the device's connected-device table, one entry per profile.

        Returns:
            dict, key = profile name, value = the connected-device string for
            that profile when connected, False otherwise. (Despite the old
            docstring, values are NOT plain booleans: callers such as the
            rfcomm-connect check match a MAC suffix inside the string value.)
        """
        profiles = dict()
        output = self.dut.get_conn_devices()
        # need to strip all whitespaces.
        conn_devs = {}

        for key in output:
            conn_devs[key.strip()] = output[key].strip()
        for key in conn_devs:
            self.logger.info('%s:%s' % (key, conn_devs[key]))
            # NOTE(review): 'XXXXXXXX' looks like a redacted/placeholder
            # device-address marker from the console output — confirm this is
            # the intended "connected" sentinel and not leftover scrubbing.
            if 'XXXXXXXX' in conn_devs[key]:
                profiles[key] = conn_devs[key]
            else:
                profiles[key] = False
        return profiles
+
    @timed_action
    def wait_for_bluetooth_status_connection_all(
            self, timeout=DEFAULT_TIMEOUT, interval=DEFAULT_CMD_INTERVAL):
        """Wait for BT connection by checking if A2DP, HFP and COMP connected.

        This is used for BT reconnect test. Per-profile connection times are
        recorded in self.measurement_timer.

        Args:
            timeout: float, timeout value in second.
            interval: float, interval between polling BT profiles.

        Returns:
            bool, True if every profile in DEFAULT_BT_STATUS reported 'TRUE'
            before the timeout, False otherwise.
        """
        ret = False
        self.measurement_timer.start_timer(DEFAULT_BT_STATUS, force=True)
        # All profile not connected by default.
        connected_status = {key: False for key in DEFAULT_BT_STATUS}
        start_time = time.time()
        while time.time() < start_time + timeout:
            try:
                time.sleep(interval)
                status = self.dut.get_bt_status()
                for key in DEFAULT_BT_STATUS:
                    # Stop each profile's timer the first time it shows 'TRUE'.
                    if (not connected_status[key] and key in status
                            and 'TRUE' == status[key]):
                        self.measurement_timer.stop_timer(key)
                        connected_status[key] = True
                        self.logger.info(
                            'BT status %s connected at %fs.' %
                            (key, self.measurement_timer.elapsed(key)))
                if False not in connected_status.values():
                    ret = True
                    break
            except DeviceError as ex:
                # Device may be temporarily unreachable mid-reconnect; keep
                # polling until the timeout.
                self.logger.warning(
                    'Device exception when waiting for reconnection: %s' % ex)
        # Stop any timers still running for profiles that never connected.
        self.measurement_timer.stop_timer(DEFAULT_BT_STATUS)
        return ret
+
    def initiate_ota_via_agsa_verify_transfer_completion_in_logcat(
            self,
            agsa_action,
            dfu_path,
            destination=None,
            force=True,
            apply_image=True,
            reconnect=True):
        """
        Starts an OTA by issuing an intent to AGSA after copying the dfu file to
        the appropriate location on the phone

        Args:
            agsa_action: projects.agsa.lib.test_actions.agsa_acts
                         .AgsaTestActions
            dfu_path: string - absolute path of dfu file
            destination: string - absolute path of file on phone if not
                         specified will use
                         /storage/emulated/0/Android/data/com.google.android
                         .googlequicksearchbox/files/download_cache/apollo.dfu
            force: bool - value set in the intent sent to AGSA
            apply_image: bool - when True, put the device in-case after the
                         transfer so the staged image is applied.
            reconnect: bool - forwarded to set_in_case().

        Returns:
            True if success False otherwise
        """
        try:
            agsa_action.initiate_agsa_and_wait_until_transfer(
                dfu_path, destination=destination, force=force)
            if apply_image:
                # set in case: triggers the device to apply the staged image.
                self.dut.set_in_case(reconnect=reconnect)
        except AgsaOTAError as ex:
            self.logger.error('Failed to OTA via AGSA %s' % ex)
            return False
        except DeviceError as ex:
            self.logger.error('Failed to bring up device %s' % ex)
            return False
        return True
+
+    @timed_action
+    def wait_for_bluetooth_a2dp_hfp_rfcomm_connect(
+            self, address, timeout=DEFAULT_TIMEOUT,
+            interval=DEFAULT_CMD_INTERVAL):
+        """Wait for BT reconnection by checking if A2DP, HFP and COMP connected
+        to the specified address.
+
+        This is used for BT connection switch test.
+
+        Args:
+            address: str, MAC of the address to connect.
+            timeout: float, timeout value in second.
+            interval: float, float, interval between polling BT profiles.
+
+        Returns:
+            True if the specified address is connected. False otherwise.
+        """
+        last_4_hex = address.replace(':', '')[-4:].lower()
+        profiles_to_check = ['HFP Pri', 'A2DP Pri', 'CTRL', 'AUDIO']
+        self.measurement_timer.start_timer(profiles_to_check, force=True)
+        end_time = time.time() + timeout
+        all_connected = True
+        while time.time() < end_time:
+            all_connected = True
+            profiles = self._bluetooth_check_profile_connection()
+            for profile in profiles_to_check:
+                if (profile in profiles and profiles[profile]
+                        and last_4_hex in profiles[profile].lower()):
+                    self.measurement_timer.stop_timer(profile)
+                else:
+                    all_connected = False
+            if all_connected:
+                break
+            time.sleep(interval)
+        # make sure the profile timer are stopped.
+        self.measurement_timer.stop_timer(profiles_to_check)
+
+        return all_connected
diff --git a/acts/framework/acts/controllers/buds_lib/test_actions/audio_utils.py b/acts/framework/acts/controllers/buds_lib/test_actions/audio_utils.py
new file mode 100644
index 0000000..42f8c46
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/test_actions/audio_utils.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python3
+#
+#   Copyright 2018 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+"""A generic library for audio related test actions"""
+
+import datetime
+import time
+
+from acts import tracelogger
+from acts import utils
+
+
+class AudioUtilsError(Exception):
+    """Generic AudioUtils error; base for audio-action failures."""
+
+
+class AudioUtils(object):
+    """A utility that manages generic audio interactions and actions on one or
+    more devices under test.
+
+    To be maintained such that it is compatible with any devices that pair with
+    phone.
+    """
+
+    def __init__(self):
+        self.logger = tracelogger.TakoTraceLogger()
+
+    def play_audio_into_device(self, audio_file_path, audio_player, dut):
+        """Open mic on DUT, play audio into DUT, close mic on DUT.
+
+        The DUT microphone is closed even if playback raises, so a playback
+        failure cannot leave the mic open for subsequent tests. A playback
+        exception still propagates to the caller.
+
+        Args:
+            audio_file_path: the path to the audio file to play, relative to the
+                           audio_player
+            audio_player: the device from which to play the audio file
+            dut: the device with the microphone
+
+        Returns:
+            bool: result of opening and closing DUT mic
+        """
+
+        if not dut.open_mic():
+            self.logger.error('DUT open_mic did not return True')
+            return False
+        try:
+            audio_player.play(audio_file_path)
+        finally:
+            # Always attempt to close the mic, even when play() raised.
+            mic_closed = dut.close_mic()
+        if not mic_closed:
+            self.logger.error('DUT close_mic did not return True.')
+            return False
+        return True
+
+    def get_agsa_interpretation_of_audio_file(self, audio_file_path,
+                                              target_interpretation,
+                                              audio_player, dut,
+                                              android_device):
+        """Gets AGSA interpretation from playing audio into DUT.
+
+        **IMPORTANT**: AGSA on android device must be connected to DUT and able
+        to receive info from DUT mic.
+
+        Args:
+          audio_file_path: the path to the audio file to play, relative to the
+                           audio_player
+          target_interpretation: what agsa interpretation should be
+          audio_player: the device from which to play the audio file
+          dut: the device with the microphone
+          android_device: android device to which dut is connected
+
+        Returns:
+          interpretation: agsa interpretation of audio file ('' when playback
+                          failed)
+          score: similarity score between interpretation and target
+                 interpretation (0.0 when playback failed)
+        """
+
+        play_start_time = datetime.datetime.now()
+        interpretation, score = '', 0.0
+        if self.play_audio_into_device(audio_file_path=audio_file_path,
+                                       audio_player=audio_player,
+                                       dut=dut):
+            # Brief pause to let AGSA finish producing its interpretation.
+            time.sleep(1)
+            interpretation = android_device.agsa_interpretation(
+                cutoff_time=play_start_time,
+                target_interpretation=target_interpretation,
+                source='bisto')
+            score = utils.string_similarity(target_interpretation,
+                                            interpretation)
+
+        return interpretation, score
diff --git a/acts/framework/acts/controllers/buds_lib/test_actions/base_test_actions.py b/acts/framework/acts/controllers/buds_lib/test_actions/base_test_actions.py
new file mode 100644
index 0000000..7b6cbc4
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/test_actions/base_test_actions.py
@@ -0,0 +1,191 @@
+#!/usr/bin/env python3
+#
+#   Copyright 2018 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+"""Base test action class, provide a base class for representing a collection of
+test actions.
+"""
+
+import datetime
+import inspect
+import time
+
+from acts import tracelogger
+from acts.libs.utils.timer import TimeRecorder
+
+# All methods start with "_" are considered hidden.
+DEFAULT_HIDDEN_ACTION_PREFIX = '_'
+
+
+def timed_action(method):
+    """A common decorator for test actions.
+
+    Logs entry of the wrapped action and records its run time via
+    self.timer, including when the action raises.
+    """
+
+    def timed(self, *args, **kw):
+        """Log the enter/exit/time of the action method."""
+        func_name = self._convert_default_action_name(method.__name__)
+        if not func_name:
+            func_name = method.__name__
+        self.logger.step('%s...' % func_name)
+        self.timer.start_timer(func_name, True)
+        try:
+            result = method(self, *args, **kw)
+        finally:
+            # Stop the timer even when the action raises, so a failing
+            # action does not leave a running timer behind.
+            # TODO: Method run time collected can be used for automatic KPI
+            # checks
+            self.timer.stop_timer(func_name)
+        return result
+
+    # Preserve the wrapped method's identity for introspection/logging.
+    timed.__name__ = method.__name__
+    timed.__doc__ = method.__doc__
+    return timed
+
+
+class TestActionNotFoundError(Exception):
+    """Raised when a requested human-readable action name is unknown."""
+    pass
+
+
+class BaseTestAction(object):
+    """Class for organizing a collection of test actions.
+
+    Test actions are just normal python methods, and should perform a specified
+    action. @timed_action decorator can log the entry/exit of the test action,
+    and the execution time.
+
+    The BaseTestAction class also provides a mapping between human friendly
+    names and test action methods in order to support configuration based
+    execution. By default, every method that is not hidden (does not start
+    with "_") is exported under a human friendly name obtained by replacing
+    "_" with spaces and title-casing the result.
+
+    Test action method can be called directly, or via
+    _perform_action(<human friendly name>, <args...>)
+    method.
+    """
+
+    @classmethod
+    def _fill_default_action_map(cls):
+        """Parse current class and get all test action methods."""
+        # a <human readable name>:<method name> map.
+        cls._action_map = dict()
+        # In Python 3, plain methods accessed on the class are functions, so
+        # filtering with inspect.ismethod alone would only pick up bound
+        # classmethods; accept both so instance methods become actions too.
+        for name, _ in inspect.getmembers(
+                cls, lambda m: inspect.isfunction(m) or inspect.ismethod(m)):
+            act_name = cls._convert_default_action_name(name)
+            if act_name:
+                cls._action_map[act_name] = name
+
+    @classmethod
+    def _convert_default_action_name(cls, func_name):
+        """Default conversion between method name -> human readable action name.
+
+        Hidden names (leading "_") convert to the empty string.
+        """
+        if not func_name.startswith(DEFAULT_HIDDEN_ACTION_PREFIX):
+            act_name = func_name.lower()
+            act_name = act_name.replace('_', ' ')
+            act_name = act_name.title()
+            return act_name.strip()
+        else:
+            return ''
+
+    @classmethod
+    def _add_action_alias(cls, default_act_name, alias):
+        """Add an alias to an existing test action.
+
+        Returns:
+            True if the alias was added, False when the default action name
+            does not exist.
+        """
+        if default_act_name in cls._action_map:
+            cls._action_map[alias] = cls._action_map[default_act_name]
+            return True
+        else:
+            return False
+
+    @classmethod
+    def _get_action_names(cls):
+        """Return the known human friendly action names."""
+        if not hasattr(cls, '_action_map'):
+            cls._fill_default_action_map()
+        return cls._action_map.keys()
+
+    @classmethod
+    def get_current_time_logcat_format(cls):
+        """Return current local time formatted like a logcat timestamp."""
+        return datetime.datetime.now().strftime('%m-%d %H:%M:%S.000')
+
+    @classmethod
+    def _action_exists(cls, action_name):
+        """Verify if a human friendly action name exists or not."""
+        if not hasattr(cls, '_action_map'):
+            cls._fill_default_action_map()
+        return action_name in cls._action_map
+
+    @classmethod
+    def _validate_actions(cls, action_list):
+        """Validate that every human friendly name in action_list exists.
+
+        Args:
+          :param action_list: list of actions to be validated.
+
+        Returns:
+          tuple of (is valid, list of invalid/non-existent actions)
+        """
+        not_found = [action_name for action_name in action_list
+                     if not cls._action_exists(action_name)]
+        return not not_found, not_found
+
+    def __init__(self, logger=None):
+        # Fall back to the default Tako trace logger when none is supplied.
+        if logger is None:
+            self.logger = tracelogger.TakoTraceLogger()
+        else:
+            self.logger = logger
+        self.timer = TimeRecorder()
+        self._fill_default_action_map()
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *args):
+        pass
+
+    def _perform_action(self, action_name, *args, **kwargs):
+        """Perform the specified human readable action."""
+        if action_name not in self._action_map:
+            raise TestActionNotFoundError('Action %s not found this class.'
+                                          % action_name)
+
+        method = self._action_map[action_name]
+        ret = getattr(self, method)(*args, **kwargs)
+        return ret
+
+    @timed_action
+    def print_actions(self):
+        """Example action methods.
+
+        All test action method must:
+            1. return a value. False means action failed, any other value means
+               pass.
+            2. should not start with "_". Methods start with "_" is hidden.
+        All test action method may:
+            1. have optional arguments. Mutable argument can be used to pass
+               value
+            2. raise exceptions. Test case class is expected to handle
+               exceptions
+        """
+        num_acts = len(self._action_map)
+        self.logger.i('I can do %d action%s:' %
+                      (num_acts, 's' if num_acts != 1 else ''))
+        for act in self._action_map.keys():
+            self.logger.i(' - %s' % act)
+        return True
+
+    @timed_action
+    def sleep(self, seconds):
+        """Sleep for the given number of seconds.
+
+        Returns:
+            True, so the action reports success under the "False means
+            failure" convention documented on print_actions (the original
+            implicitly returned None).
+        """
+        self.logger.i('%s seconds' % seconds)
+        time.sleep(seconds)
+        return True
+
+
+if __name__ == '__main__':
+    # Ad-hoc smoke test: exercise the action map both via a direct method
+    # call and via human-readable-name dispatch.
+    acts = BaseTestAction()
+    acts.print_actions()
+    acts._perform_action('print actions')
+    print(acts._get_action_names())
diff --git a/acts/framework/acts/controllers/buds_lib/test_actions/bt_utils.py b/acts/framework/acts/controllers/buds_lib/test_actions/bt_utils.py
new file mode 100644
index 0000000..08de786
--- /dev/null
+++ b/acts/framework/acts/controllers/buds_lib/test_actions/bt_utils.py
@@ -0,0 +1,292 @@
+#!/usr/bin/env python3
+#
+#   Copyright 2018 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+# TODO: In the future to decide whether to move it to a common directory rather
+# than the one specific to apollo.
+# TODO: The move is contingent on understanding the functions that should be
+# supported by the dut device (sec_device).
+
+"""A generic library with bluetooth related functions. The connection is
+assumed to be between an Android phone and any DUT (referred to as the
+secondary device) that supports the following calls:
+        sec_device.turn_on_bluetooth()
+        sec_device.is_bt_enabled():
+        sec_device.bluetooth_address
+        sec_device.set_pairing_mode()
+        sec_device.factory_reset()
+
+"""
+import queue
+import time
+
+from acts import tracelogger
+from acts.utils import wait_until
+from acts.utils import TimeoutError
+
+# Add connection profile for future devices in this dictionary
+WEARABLE_BT_PROTOCOLS = {
+    'rio': {
+        'Comp. App': 'FALSE',
+        'HFP (pri.)': 'FALSE',
+        'HFP (sec.)': 'FALSE',
+        'A2DP (pri.)': 'FALSE',
+        'A2DP (sec.)': 'FALSE',
+    },
+    'apollo': {
+        'Comp': 'FALSE',
+        'HFP(pri.)': 'FALSE',
+        'HFP(sec.)': 'FALSE',
+        'A2DP(pri)': 'FALSE',
+        'A2DP(sec)': 'FALSE',
+    }
+}
+
+
+class BTUtilsError(Exception):
+    """Generic BTUtils error; raised on Bluetooth control failures."""
+
+
+class BTUtils(object):
+    """A utility that provides access to bluetooth controls.
+
+    This class to be maintained as a generic class such that it is compatible
+    with any devices that pair with a phone.
+    """
+
+    def __init__(self):
+        # Default timeout, in seconds, used by all wait_until polls.
+        self.default_timeout = 60
+        self.logger = tracelogger.TakoTraceLogger(file_name=__file__)
+
+    def bt_pair_and_connect(self, pri_device, sec_device):
+        """Pair and connect a pri_device to a sec_device.
+
+        Args:
+        pri_device: an android device with sl4a installed.
+        sec_device: a wearable device.
+
+        Returns:
+        (Tuple)True if pair and connect successful. False Otherwise.
+        Time in seconds to execute the flow (0 on failure).
+        """
+
+        pair_result, pair_time = self.bt_pair(pri_device, sec_device)
+        if not pair_result:
+            return pair_result, 0
+        connect_result, connect_time = self.bt_connect(pri_device, sec_device)
+        return connect_result, pair_time + connect_time
+
+    def bt_pair(self, pri_device, sec_device):
+        """Pair a pri_device to a sec_device.
+
+        Args:
+        pri_device: an android device with sl4a installed.
+        sec_device: a wearable device.
+
+        Returns:
+            (Tuple)True if pair successful. False Otherwise.
+            Time in seconds to execute the flow.
+
+        Raises:
+            BTUtilsError: if Bluetooth cannot be toggled on pri_device.
+         """
+        start_time = end_time = time.time()
+        # Enable BT on the primary device if it's not currently ON.
+        if not pri_device.droid.bluetoothCheckState():
+            pri_device.droid.bluetoothToggleState(True)
+            try:
+                pri_device.ed.pop_event(event_name='BluetoothStateChangedOn',
+                                        timeout=10)
+            except queue.Empty:
+                raise BTUtilsError(
+                    'Failed to toggle Bluetooth on the primary device.')
+        sec_device.turn_on_bluetooth()
+        if not sec_device.is_bt_enabled():
+            self.logger.error(
+                'Could not turn on Bluetooth on secondary devices.')
+            return False, 0
+        target_addr = sec_device.bluetooth_address
+        sec_device.set_pairing_mode()
+
+        pri_device.droid.bluetoothDiscoverAndBond(target_addr)
+        # Loop until we have bonded successfully or timeout.
+        self.logger.info('Verifying devices are bonded')
+        wait_until(lambda: self.android_device_in_paired_state(pri_device,
+                                                               target_addr),
+                   self.default_timeout)
+        end_time = time.time()
+        return True, end_time - start_time
+
+    def bt_connect(self, pri_device, sec_device):
+        """Connect a previously paired sec_device to a pri_device.
+
+        Args:
+          pri_device: an android device with sl4a installed.
+          sec_device: a wearable device.
+
+        Returns:
+          (Tuple)True if connect successful. False otherwise.
+          Time in seconds to execute the flow.
+        """
+        start_time = end_time = time.time()
+        target_addr = sec_device.bluetooth_address
+        # First check that devices are bonded.
+        paired = False
+        for paired_device in pri_device.droid.bluetoothGetBondedDevices():
+            if paired_device['address'] == target_addr:
+                paired = True
+                break
+        if not paired:
+            self.logger.error('Not paired to %s', sec_device.device_name)
+            return False, 0
+
+        self.logger.info('Attempting to connect.')
+        pri_device.droid.bluetoothConnectBonded(target_addr)
+
+        self.logger.info('Verifying devices are connected')
+        wait_until(
+            lambda: self.android_device_in_connected_state(pri_device,
+                                                           target_addr),
+            self.default_timeout)
+        end_time = time.time()
+        return True, end_time - start_time
+
+    def android_device_in_paired_state(self, device, mac_address):
+        """Check device in paired list."""
+        bonded_devices = device.droid.bluetoothGetBondedDevices()
+        for d in bonded_devices:
+            if d['address'] == mac_address:
+                self.logger.info('Successfully bonded to device')
+                return True
+        return False
+
+    def android_device_in_connected_state(self, device, mac_address):
+        """Check device in connected list."""
+        connected_devices = device.droid.bluetoothGetConnectedDevices()
+        for d in connected_devices:
+            if d['address'] == mac_address:
+                self.logger.info('Successfully connected to device')
+                return True
+        return False
+
+    def bt_unpair(self, pri_device, sec_device, factory_reset_dut=True):
+        """Unpairs two Android devices using bluetooth.
+
+        Args:
+          pri_device: an android device with sl4a installed.
+          sec_device: a wearable device.
+          factory_reset_dut: bool, whether to factory reset sec_device after
+            a successful unpair.
+
+        Returns:
+          (Tuple)True: if the devices successfully unpaired.
+          Time in seconds to execute the flow (0 when already unpaired).
+
+        Raises:
+          BTUtilsError: when the unbond request is rejected by the device.
+        """
+        target_address = sec_device.bluetooth_address
+        if not self.android_device_in_paired_state(pri_device, target_address):
+            self.logger.debug('Already unpaired.')
+            return True, 0
+        self.logger.debug('Unpairing from %s' % target_address)
+        start_time = end_time = time.time()
+        # The original `assert (cond, 'msg')` asserted a non-empty tuple and
+        # was therefore always true; actually check the return value and fail
+        # loudly when the unbond request is rejected.
+        if not pri_device.droid.bluetoothUnbond(target_address):
+            raise BTUtilsError('Failed to request device unpairing.')
+
+        # Check that devices have unpaired successfully.
+        self.logger.debug('Verifying devices are unpaired')
+
+        # Loop until we have unbonded successfully or timeout.
+        wait_until(
+            lambda: self.android_device_in_paired_state(pri_device,
+                                                        target_address),
+            self.default_timeout,
+            condition=False)
+
+        self.logger.info('Successfully unpaired from %s' % target_address)
+        if factory_reset_dut:
+            self.logger.info('Factory reset DUT')
+            sec_device.factory_reset()
+        end_time = time.time()
+        return True, end_time - start_time
+
+    def check_device_bt(self, device, **kwargs):
+        """Check the Bluetooth connection status from device.
+
+        Args:
+          device: a wearable device.
+          **kwargs: additional parameters; 'profiles' is forwarded to
+            check_dut_status.
+
+        Returns:
+          True if bt status check success, False otherwise; None when the
+          device type is not rio/apollo (unsupported).
+        """
+        if device.dut_type in ['rio', 'apollo']:
+            profiles = kwargs.get('profiles')
+            return self.check_dut_status(device, profiles)
+
+    def check_dut_status(self, device, profiles=None):
+        """Check the Bluetooth connection status from rio/apollo device.
+
+        Args:
+          device: rio/apollo device
+          profiles: A dict of profiles, eg. {'HFP (pri.)': 'TRUE', 'Comp. App':
+            'TRUE', 'A2DP (pri.)': 'TRUE'}. May be None/empty, in which case
+            only the per-device defaults are checked (the original crashed on
+            None).
+
+        Returns:
+          True: if bt status check success, False otherwise.
+        """
+        # Copy the per-device defaults so the module-level
+        # WEARABLE_BT_PROTOCOLS table is never mutated across calls (the
+        # original wrote caller-supplied values straight into it).
+        expected = dict(WEARABLE_BT_PROTOCOLS[device.dut_type])
+        self.logger.info(profiles)
+        if profiles:
+            expected.update(profiles)
+        try:
+            wait_until(lambda: self._compare_profile(device, expected),
+                       self.default_timeout)
+        except TimeoutError:
+            status = device.get_bt_status()
+            msg_fmt = self._get_formatted_output(expected, status)
+            self.logger.error(msg_fmt)
+            return False
+        return True
+
+    def _get_formatted_output(self, expected, actual):
+        """On BT status mismatch generate formatted output string.
+
+        Args:
+          expected: Expected BT status hash.
+          actual: Actual BT status hash from Rio.
+
+        Returns:
+          Formatted mismatch string.
+
+        Raises:
+          BTUtilsError: when actual is None.
+        """
+        msg = ''
+        mismatch_format = '{}: Expected {} Actual {}. '
+        if actual is None:
+            raise BTUtilsError('None is not expected.')
+        # NOTE(review): assumes every expected key is present in actual;
+        # a missing key would raise KeyError — confirm against get_bt_status.
+        for key in expected.keys():
+            if expected[key] != actual[key]:
+                msg += mismatch_format.format(key, expected[key], actual[key])
+        return msg
+
+    def _compare_profile(self, device, expected):
+        """Compare input expected profile with actual."""
+        actual = device.get_bt_status()
+        if actual is None:
+            raise BTUtilsError('None is not expected.')
+        for key in expected.keys():
+            if expected[key] != actual[key]:
+                return False
+        return True
+
diff --git a/acts/framework/acts/keys.py b/acts/framework/acts/keys.py
index 6c7d4ff..fbb1457 100644
--- a/acts/framework/acts/keys.py
+++ b/acts/framework/acts/keys.py
@@ -42,6 +42,7 @@
     # Config names for controllers packaged in ACTS.
     key_android_device = "AndroidDevice"
     key_fuchsia_device = "FuchsiaDevice"
+    key_buds_device = "BudsDevice"
     key_chameleon_device = "ChameleonDevice"
     key_native_android_device = "NativeAndroidDevice"
     key_relay_device = "RelayDevice"
@@ -64,6 +65,7 @@
     m_key_monsoon = "monsoon"
     m_key_android_device = "android_device"
     m_key_fuchsia_device = "fuchsia_device"
+    m_key_buds_device = "buds_controller"
     m_key_chameleon_device = "chameleon_controller"
     m_key_native_android_device = "native_android_device"
     m_key_relay_device = "relay_device_controller"
@@ -84,6 +86,7 @@
     builtin_controller_names = [
         key_android_device,
         key_fuchsia_device,
+        key_buds_device,
         key_native_android_device,
         key_relay_device,
         key_access_point,
diff --git a/acts/framework/acts/libs/utils/__init__.py b/acts/framework/acts/libs/utils/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/acts/framework/acts/libs/utils/__init__.py
diff --git a/acts/framework/acts/libs/utils/timer.py b/acts/framework/acts/libs/utils/timer.py
new file mode 100644
index 0000000..2350aa9
--- /dev/null
+++ b/acts/framework/acts/libs/utils/timer.py
@@ -0,0 +1,132 @@
+"""A simple timer class to keep record of the elapsed time."""
+
+import time
+
+
+class TimeRecorder(object):
+    """Main class to keep time records.
+
+    A timer record contains an ID, a start timestamp, and an optional stop
+    timestamps. The elapsed time calculated as stop - start.
+    If the stop timestamp is not set, current system time will be used.
+
+    Example usage:
+    >>> timer = TimeRecorder()
+    >>> # start a single timer, ID = 'lunch'
+    >>> timer.start_timer('lunch')
+    >>> # start two timers at the same time
+    >>> timer.start_timer(['salad', 'dessert'])
+    >>> # stop a single timer
+    >>> timer.stop_timer('salad')
+    >>> # get elapsed time of all timers
+    >>> timer.elapsed()
+    """
+
+    def __init__(self):
+        # Maps record ID -> [start_timestamp, stop_timestamp_or_None].
+        self.recorder = dict()
+
+    def start_timer(self, record_ids='Default', force=False):
+        """Start one or more timer.
+
+        Starts one or more timer at current system time with the record ID
+        specified in record_ids. Will overwrite/restart existing timer.
+
+        Args:
+            record_ids: timer record IDs. Can be a string or a list of strings.
+                        If the record ID is a list, will start multiple timers
+                        at the same time.
+            force: Force update the timer's start time if the specified timer
+                   has already started. By default we won't update started timer
+                   again.
+
+        Returns:
+            Number of timer started.
+        """
+        # Normalize a single ID to a list so one code path handles both.
+        if isinstance(record_ids, str):
+            record_ids = [record_ids]
+        # One shared timestamp so all requested timers start simultaneously.
+        start_time = time.time()
+        for rec in record_ids:
+            if force or rec not in self.recorder:
+                self.recorder[rec] = [start_time, None]
+        return len(record_ids)
+
+    def stop_timer(self, record_ids=None, force=False):
+        """Stop one or more timer.
+
+        Stops one or more timer at current system time.
+
+        Args:
+            record_ids: timer record IDs. Can be a string or a list of strings.
+                        If the record ID is a list, will stop multiple timers at
+                        the same time. By default, it will stop all timers.
+            force: Force update the timer's stop time if the specified timer has
+                   already stopped. By default we won't update stopped timer
+                   again.
+
+        Returns:
+            Number of timer stopped.
+        """
+        # Stop all records if no ID is provided.
+        if record_ids is None:
+            record_ids = self.recorder.keys()
+        elif isinstance(record_ids, str):
+            record_ids = [record_ids]
+        stop_time = time.time()
+        num_rec = 0
+        for rec in record_ids:
+            if rec in self.recorder:
+                if force or self.recorder[rec][1] is None:
+                    self.recorder[rec][1] = stop_time
+                    num_rec += 1
+        return num_rec
+
+    def elapsed(self, record_ids=None):
+        """Return elapsed time in seconds.
+
+        For records with no stop time, will calculate based on the current
+        system time.
+
+        Args:
+            record_ids: timer record IDs. Can be a string or a list of strings.
+                        If the record ID is a list, will compute the elapsed
+                        time for all specified timers. Default value (None)
+                        calculates elapsed time for all existing timers.
+
+        Returns:
+            The elapsed time. If the record_ids is a string, will return the
+            time in seconds as float type. If the record_ids is a list or
+            default (None), will return a dict of the <record id, elapsed time>.
+            Returns None if no requested record exists.
+        """
+        single_record = False
+        if record_ids is None:
+            record_ids = self.recorder.keys()
+        elif isinstance(record_ids, str):
+            record_ids = [record_ids]
+            single_record = True
+        results = dict()
+        # Snapshot "now" once so still-running timers share a consistent end.
+        curr_time = time.time()
+        for rec in record_ids:
+            if rec in self.recorder:
+                if self.recorder[rec][1] is not None:
+                    results[rec] = self.recorder[rec][1] - self.recorder[rec][0]
+                else:
+                    results[rec] = curr_time - self.recorder[rec][0]
+        if not results:  # no valid record found
+            return None
+        elif single_record and len(record_ids) == 1:
+            # only 1 record is requested, return results directly
+            return results[record_ids[0]]
+        else:
+            return results  # multiple records, return a dict.
+
+    def clear(self, record_ids=None):
+        """Clear existing time records."""
+        # None clears everything; otherwise remove only the named records.
+        if record_ids is None:
+            self.recorder = dict()
+            return
+
+        if isinstance(record_ids, str):
+            record_ids = [record_ids]
+        for rec in record_ids:
+            if rec in self.recorder:
+                del self.recorder[rec]
diff --git a/acts/framework/acts/logger.py b/acts/framework/acts/logger.py
index eb6bc57..5b757dd 100755
--- a/acts/framework/acts/logger.py
+++ b/acts/framework/acts/logger.py
@@ -21,6 +21,7 @@
 import os
 import re
 import sys
+from copy import copy
 
 from acts import tracelogger
 from acts.utils import create_dir
@@ -34,6 +35,62 @@
 logline_timestamp_re = re.compile("\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d.\d\d\d")
 
 
+# yapf: disable
+class Style:
+    """ANSI SGR escape codes controlling text weight/intensity."""
+    RESET  = '\033[0m'
+    BRIGHT = '\033[1m'
+    DIM    = '\033[2m'
+    NORMAL = '\033[22m'
+
+
+class Fore:
+    """ANSI foreground color escape codes."""
+    BLACK   = '\033[30m'
+    RED     = '\033[31m'
+    GREEN   = '\033[32m'
+    YELLOW  = '\033[33m'
+    BLUE    = '\033[34m'
+    MAGENTA = '\033[35m'
+    CYAN    = '\033[36m'
+    WHITE   = '\033[37m'
+    RESET   = '\033[39m'
+
+
+class Back:
+    """ANSI background color escape codes."""
+    BLACK   = '\033[40m'
+    RED     = '\033[41m'
+    GREEN   = '\033[42m'
+    YELLOW  = '\033[43m'
+    BLUE    = '\033[44m'
+    MAGENTA = '\033[45m'
+    CYAN    = '\033[46m'
+    WHITE   = '\033[47m'
+    RESET   = '\033[49m'
+
+
+# Custom log levels and the terminal style applied to each level name.
+# Numeric values are chosen relative to the stdlib levels
+# (DEBUG=10, INFO=20, WARNING=30, ERROR=40).
+LOG_LEVELS = {
+  'DEBUG':     {'level': 10, 'style': Fore.GREEN + Style.BRIGHT},
+  'CASE':      {'level': 11, 'style': Back.BLUE + Fore.WHITE + Style.BRIGHT},
+  'SUITE':     {'level': 12, 'style': Back.MAGENTA + Fore.WHITE + Style.BRIGHT},
+  'INFO':      {'level': 20, 'style': Style.NORMAL},
+  'STEP':      {'level': 15, 'style': Fore.WHITE + Style.BRIGHT},
+  'WARNING':   {'level': 30, 'style': Fore.YELLOW + Style.BRIGHT},
+  'ERROR':     {'level': 40, 'style': Fore.RED + Style.BRIGHT},
+  'EXCEPTION': {'level': 45, 'style': Back.RED + Fore.WHITE + Style.BRIGHT},
+  'DEVICE':    {'level': 51, 'style': Fore.CYAN + Style.BRIGHT},
+}
+# yapf: enable
+
+
+class ColoredLogFormatter(logging.Formatter):
+    """Formatter that wraps the record's level name in ANSI color codes.
+
+    The record is shallow-copied so the colored level name never leaks into
+    other handlers (e.g. the plain-text file handlers) sharing the record.
+    """
+
+    def format(self, record):
+        colored_record = copy(record)
+        level_name = colored_record.levelname
+        # Level names absent from LOG_LEVELS (e.g. CRITICAL, or numeric
+        # "Level N" names) are left unstyled instead of raising KeyError,
+        # which would crash the stream handler.
+        style = LOG_LEVELS.get(level_name, {}).get('style', '')
+        formatted_level_name = '%s%s%s' % (style, level_name, Style.RESET)
+        colored_record.levelname = formatted_level_name
+        return super().format(colored_record)
+
+
 def _parse_logline_timestamp(t):
     """Parses a logline timestamp into a tuple.
 
@@ -151,13 +208,19 @@
     # Log info to stream
     terminal_format = log_line_format
     if prefix:
-        terminal_format = "[{}] {}".format(prefix, log_line_format)
-    c_formatter = logging.Formatter(terminal_format, log_line_time_format)
-    ch = logging.StreamHandler(sys.stdout)
-    ch.setFormatter(c_formatter)
-    ch.setLevel(logging.INFO)
-    # Log everything to file
+        terminal_format = '[{}] {}'.format(prefix, log_line_format)
+    # A formatter for logging everything with timestamps
     f_formatter = logging.Formatter(log_line_format, log_line_time_format)
+    # Same as above, but with color, and any potential prefix
+    c_formatter = ColoredLogFormatter(terminal_format, log_line_time_format)
+
+    ch = logging.StreamHandler(sys.stdout)
+    if os.isatty(sys.stdout.fileno()):
+        ch.setFormatter(c_formatter)
+    else:
+        ch.setFormatter(f_formatter)
+    ch.setLevel(logging.INFO)
+
     # All the logs of this test class go into one directory
     if filename is None:
         filename = get_log_file_timestamp()
@@ -178,6 +241,13 @@
     log.addHandler(fh_error)
     log.log_path = log_path
     logging.log_path = log_path
+    _enable_additional_log_levels()
+
+
+def _enable_additional_log_levels():
+    """Enables logging levels used for tracing tests and debugging devices."""
+    # Register each custom level name with the stdlib logging module so
+    # records logged at these numeric levels render with the proper name.
+    for log_type, log_data in LOG_LEVELS.items():
+        logging.addLevelName(log_data['level'], log_type)
 
 
 def kill_test_logger(logger):
@@ -266,6 +336,8 @@
 
             <TESTBED> <TIME> <LOG_LEVEL> [tag123] logged message
     """
+
     def logging_lambda(msg):
         return '[%s] %s' % (tag, msg)
+
     return create_logger(logging_lambda)
diff --git a/acts/framework/acts/test_utils/tel/tel_test_utils.py b/acts/framework/acts/test_utils/tel/tel_test_utils.py
index 81e2753..bba2167 100644
--- a/acts/framework/acts/test_utils/tel/tel_test_utils.py
+++ b/acts/framework/acts/test_utils/tel/tel_test_utils.py
@@ -2856,7 +2856,8 @@
 def trigger_modem_crash_by_modem(ad, timeout=120):
     begin_time = get_device_epoch_time(ad)
     ad.adb.shell(
-        "setprop persist.vendor.sys.modem.diag.mdlog false", ignore_status=True)
+        "setprop persist.vendor.sys.modem.diag.mdlog false",
+        ignore_status=True)
     # Legacy pixels use persist.sys.modem.diag.mdlog.
     ad.adb.shell(
         "setprop persist.sys.modem.diag.mdlog false", ignore_status=True)
@@ -6731,3 +6732,57 @@
 def get_screen_shot_logs(ads, test_name="", begin_time=None):
     for ad in ads:
         get_screen_shot_log(ad, test_name=test_name, begin_time=begin_time)
+
+
+def bring_up_connectivity_monitor(ad):
+    monitor_apk = None
+    for apk in ("com.google.telephonymonitor",
+                "com.google.android.connectivitymonitor"):
+        if ad.is_apk_installed(apk):
+            ad.log.info("apk %s is installed", apk)
+            monitor_apk = apk
+            break
+    if not monitor_apk:
+        ad.log.info("ConnectivityMonitor|TelephonyMonitor is not installed")
+        return False
+    toggle_connectivity_monitor_setting(ad, True)
+
+    if not ad.is_apk_running(monitor_apk):
+        ad.log.info("%s is not running", monitor_apk)
+        # Reboot
+        ad.log.info("reboot to bring up %s", monitor_apk)
+        reboot_device(ad)
+        for i in range(30):
+            if ad.is_apk_running(monitor_apk):
+                ad.log.info("%s is running after reboot", monitor_apk)
+                return True
+            else:
+                ad.log.info(
+                    "%s is not running after reboot. Wait and check again",
+                    monitor_apk)
+                time.sleep(30)
+        ad.log.error("%s is not running after reboot", monitor_apk)
+        return False
+    else:
+        ad.log.info("%s is running", monitor_apk)
+        return True
+
+
+def toggle_connectivity_monitor_setting(ad, state=True):
+    monitor_setting = ad.adb.getprop("persist.radio.enable_tel_mon")
+    ad.log.info("radio.enable_tel_mon setting is %s", monitor_setting)
+    current_state = True if monitor_setting == "user_enabled" else False
+    if current_state == state:
+        return True
+    elif state is None:
+        state = not current_state
+    expected_monitor_setting = "user_enabled" if state else "disabled"
+    cmd = "setprop persist.radio.enable_tel_mon %s" % expected_monitor_setting
+    ad.log.info("Toggle connectivity monitor by %s", cmd)
+    ad.adb.shell(
+        "am start -n com.android.settings/.DevelopmentSettings",
+        ignore_status=True)
+    ad.adb.shell(cmd)
+    monitor_setting = ad.adb.getprop("persist.radio.enable_tel_mon")
+    ad.log.info("radio.enable_tel_mon setting is %s", monitor_setting)
+    return monitor_setting == expected_monitor_setting
diff --git a/acts/framework/acts/test_utils/wifi/aware/AwareBaseTest.py b/acts/framework/acts/test_utils/wifi/aware/AwareBaseTest.py
index 9ca84d3..da60c40 100644
--- a/acts/framework/acts/test_utils/wifi/aware/AwareBaseTest.py
+++ b/acts/framework/acts/test_utils/wifi/aware/AwareBaseTest.py
@@ -23,93 +23,94 @@
 
 
 class AwareBaseTest(BaseTestClass):
-  def __init__(self, controllers):
-    super(AwareBaseTest, self).__init__(controllers)
+    def __init__(self, controllers):
+        super(AwareBaseTest, self).__init__(controllers)
 
-  # message ID counter to make sure all uses are unique
-  msg_id = 0
+    # message ID counter to make sure all uses are unique
+    msg_id = 0
 
-  # offset (in seconds) to separate the start-up of multiple devices.
-  # De-synchronizes the start-up time so that they don't start and stop scanning
-  # at the same time - which can lead to very long clustering times.
-  device_startup_offset = 2
+    # offset (in seconds) to separate the start-up of multiple devices.
+    # De-synchronizes the start-up time so that they don't start and stop scanning
+    # at the same time - which can lead to very long clustering times.
+    device_startup_offset = 2
 
-  def setup_test(self):
-    required_params = ("aware_default_power_mode", )
-    self.unpack_userparams(required_params)
+    def setup_test(self):
+        required_params = ("aware_default_power_mode", )
+        self.unpack_userparams(required_params)
 
-    for ad in self.android_devices:
-      asserts.skip_if(
-          not ad.droid.doesDeviceSupportWifiAwareFeature(),
-          "Device under test does not support Wi-Fi Aware - skipping test")
-      wutils.wifi_toggle_state(ad, True)
-      ad.droid.wifiP2pClose()
-      utils.set_location_service(ad, True)
-      aware_avail = ad.droid.wifiIsAwareAvailable()
-      if not aware_avail:
-        self.log.info('Aware not available. Waiting ...')
-        autils.wait_for_event(ad, aconsts.BROADCAST_WIFI_AWARE_AVAILABLE)
-      ad.ed.clear_all_events()
-      ad.aware_capabilities = autils.get_aware_capabilities(ad)
-      self.reset_device_parameters(ad)
-      self.reset_device_statistics(ad)
-      self.set_power_mode_parameters(ad)
-      ad.droid.wifiSetCountryCode(wutils.WifiEnums.CountryCode.US)
-      autils.configure_ndp_allow_any_override(ad, True)
-      # set randomization interval to 0 (disable) to reduce likelihood of
-      # interference in tests
-      autils.configure_mac_random_interval(ad, 0)
+        for ad in self.android_devices:
+            asserts.skip_if(
+                not ad.droid.doesDeviceSupportWifiAwareFeature(),
+                "Device under test does not support Wi-Fi Aware - skipping test"
+            )
+            wutils.wifi_toggle_state(ad, True)
+            ad.droid.wifiP2pClose()
+            utils.set_location_service(ad, True)
+            aware_avail = ad.droid.wifiIsAwareAvailable()
+            if not aware_avail:
+                self.log.info('Aware not available. Waiting ...')
+                autils.wait_for_event(ad,
+                                      aconsts.BROADCAST_WIFI_AWARE_AVAILABLE)
+            ad.ed.clear_all_events()
+            ad.aware_capabilities = autils.get_aware_capabilities(ad)
+            self.reset_device_parameters(ad)
+            self.reset_device_statistics(ad)
+            self.set_power_mode_parameters(ad)
+            ad.droid.wifiSetCountryCode(wutils.WifiEnums.CountryCode.US)
+            autils.configure_ndp_allow_any_override(ad, True)
+            # set randomization interval to 0 (disable) to reduce likelihood of
+            # interference in tests
+            autils.configure_mac_random_interval(ad, 0)
 
-  def teardown_test(self):
-    for ad in self.android_devices:
-      if not ad.droid.doesDeviceSupportWifiAwareFeature():
-        return
-      ad.droid.wifiP2pClose()
-      ad.droid.wifiAwareDestroyAll()
-      self.reset_device_parameters(ad)
-      autils.validate_forbidden_callbacks(ad)
+    def teardown_test(self):
+        for ad in self.android_devices:
+            if not ad.droid.doesDeviceSupportWifiAwareFeature():
+                return
+            ad.droid.wifiP2pClose()
+            ad.droid.wifiAwareDestroyAll()
+            self.reset_device_parameters(ad)
+            autils.validate_forbidden_callbacks(ad)
 
-  def reset_device_parameters(self, ad):
-    """Reset device configurations which may have been set by tests. Should be
+    def reset_device_parameters(self, ad):
+        """Reset device configurations which may have been set by tests. Should be
     done before tests start (in case previous one was killed without tearing
     down) and after they end (to leave device in usable state).
 
     Args:
       ad: device to be reset
     """
-    ad.adb.shell("cmd wifiaware reset")
+        ad.adb.shell("cmd wifiaware reset")
 
-  def reset_device_statistics(self, ad):
-    """Reset device statistics.
+    def reset_device_statistics(self, ad):
+        """Reset device statistics.
 
     Args:
         ad: device to be reset
     """
-    ad.adb.shell("cmd wifiaware native_cb get_cb_count --reset")
+        ad.adb.shell("cmd wifiaware native_cb get_cb_count --reset")
 
-  def set_power_mode_parameters(self, ad):
-    """Set the power configuration DW parameters for the device based on any
+    def set_power_mode_parameters(self, ad):
+        """Set the power configuration DW parameters for the device based on any
     configuration overrides (if provided)"""
-    if self.aware_default_power_mode == "INTERACTIVE":
-      autils.config_settings_high_power(ad)
-    elif self.aware_default_power_mode == "NON_INTERACTIVE":
-      autils.config_settings_low_power(ad)
-    else:
-      asserts.assert_false(
-          "The 'aware_default_power_mode' configuration must be INTERACTIVE or "
-          "NON_INTERACTIVE"
-      )
+        if self.aware_default_power_mode == "INTERACTIVE":
+            autils.config_settings_high_power(ad)
+        elif self.aware_default_power_mode == "NON_INTERACTIVE":
+            autils.config_settings_low_power(ad)
+        else:
+            asserts.assert_false(
+                "The 'aware_default_power_mode' configuration must be INTERACTIVE or "
+                "NON_INTERACTIVE")
 
-  def get_next_msg_id(self):
-    """Increment the message ID and returns the new value. Guarantees that
+    def get_next_msg_id(self):
+        """Increment the message ID and returns the new value. Guarantees that
     each call to the method returns a unique value.
 
     Returns: a new message id value.
     """
-    self.msg_id = self.msg_id + 1
-    return self.msg_id
+        self.msg_id = self.msg_id + 1
+        return self.msg_id
 
-  def on_fail(self, test_name, begin_time):
-    for ad in self.android_devices:
-      ad.take_bug_report(test_name, begin_time)
-      ad.cat_adb_log(test_name, begin_time)
+    def on_fail(self, test_name, begin_time):
+        for ad in self.android_devices:
+            ad.take_bug_report(test_name, begin_time)
+            ad.cat_adb_log(test_name, begin_time)
diff --git a/acts/framework/acts/test_utils/wifi/aware/aware_test_utils.py b/acts/framework/acts/test_utils/wifi/aware/aware_test_utils.py
index ca15db5..f838279 100644
--- a/acts/framework/acts/test_utils/wifi/aware/aware_test_utils.py
+++ b/acts/framework/acts/test_utils/wifi/aware/aware_test_utils.py
@@ -39,11 +39,11 @@
 
 
 def decorate_event(event_name, id):
-  return '%s_%d' % (event_name, id)
+    return '%s_%d' % (event_name, id)
 
 
 def wait_for_event(ad, event_name, timeout=EVENT_TIMEOUT):
-  """Wait for the specified event or timeout.
+    """Wait for the specified event or timeout.
 
   Args:
     ad: The android device
@@ -52,19 +52,21 @@
   Returns:
     The event (if available)
   """
-  prefix = ''
-  if hasattr(ad, 'pretty_name'):
-    prefix = '[%s] ' % ad.pretty_name
-  try:
-    event = ad.ed.pop_event(event_name, timeout)
-    ad.log.info('%s%s: %s', prefix, event_name, event['data'])
-    return event
-  except queue.Empty:
-    ad.log.info('%sTimed out while waiting for %s', prefix, event_name)
-    asserts.fail(event_name)
+    prefix = ''
+    if hasattr(ad, 'pretty_name'):
+        prefix = '[%s] ' % ad.pretty_name
+    try:
+        event = ad.ed.pop_event(event_name, timeout)
+        ad.log.info('%s%s: %s', prefix, event_name, event['data'])
+        return event
+    except queue.Empty:
+        ad.log.info('%sTimed out while waiting for %s', prefix, event_name)
+        asserts.fail(event_name)
 
-def wait_for_event_with_keys(ad, event_name, timeout=EVENT_TIMEOUT, *keyvalues):
-  """Wait for the specified event contain the key/value pairs or timeout
+
+def wait_for_event_with_keys(ad, event_name, timeout=EVENT_TIMEOUT,
+                             *keyvalues):
+    """Wait for the specified event contain the key/value pairs or timeout
 
   Args:
     ad: The android device
@@ -74,47 +76,51 @@
   Returns:
     The event (if available)
   """
-  def filter_callbacks(event, keyvalues):
-    for keyvalue in keyvalues:
-      key, value = keyvalue
-      if event['data'][key] != value:
-        return False
-    return True
 
-  prefix = ''
-  if hasattr(ad, 'pretty_name'):
-    prefix = '[%s] ' % ad.pretty_name
-  try:
-    event = ad.ed.wait_for_event(event_name, filter_callbacks, timeout,
-                                 keyvalues)
-    ad.log.info('%s%s: %s', prefix, event_name, event['data'])
-    return event
-  except queue.Empty:
-    ad.log.info('%sTimed out while waiting for %s (%s)', prefix, event_name,
-                keyvalues)
-    asserts.fail(event_name)
+    def filter_callbacks(event, keyvalues):
+        for keyvalue in keyvalues:
+            key, value = keyvalue
+            if event['data'][key] != value:
+                return False
+        return True
+
+    prefix = ''
+    if hasattr(ad, 'pretty_name'):
+        prefix = '[%s] ' % ad.pretty_name
+    try:
+        event = ad.ed.wait_for_event(event_name, filter_callbacks, timeout,
+                                     keyvalues)
+        ad.log.info('%s%s: %s', prefix, event_name, event['data'])
+        return event
+    except queue.Empty:
+        ad.log.info('%sTimed out while waiting for %s (%s)', prefix,
+                    event_name, keyvalues)
+        asserts.fail(event_name)
+
 
 def fail_on_event(ad, event_name, timeout=EVENT_TIMEOUT):
-  """Wait for a timeout period and looks for the specified event - fails if it
+    """Wait for a timeout period and looks for the specified event - fails if it
   is observed.
 
   Args:
     ad: The android device
     event_name: The event to wait for (and fail on its appearance)
   """
-  prefix = ''
-  if hasattr(ad, 'pretty_name'):
-    prefix = '[%s] ' % ad.pretty_name
-  try:
-    event = ad.ed.pop_event(event_name, timeout)
-    ad.log.info('%sReceived unwanted %s: %s', prefix, event_name, event['data'])
-    asserts.fail(event_name, extras=event)
-  except queue.Empty:
-    ad.log.info('%s%s not seen (as expected)', prefix, event_name)
-    return
+    prefix = ''
+    if hasattr(ad, 'pretty_name'):
+        prefix = '[%s] ' % ad.pretty_name
+    try:
+        event = ad.ed.pop_event(event_name, timeout)
+        ad.log.info('%sReceived unwanted %s: %s', prefix, event_name,
+                    event['data'])
+        asserts.fail(event_name, extras=event)
+    except queue.Empty:
+        ad.log.info('%s%s not seen (as expected)', prefix, event_name)
+        return
+
 
 def fail_on_event_with_keys(ad, event_name, timeout=EVENT_TIMEOUT, *keyvalues):
-  """Wait for a timeout period and looks for the specified event which contains
+    """Wait for a timeout period and looks for the specified event which contains
   the key/value pairs - fails if it is observed.
 
   Args:
@@ -123,47 +129,50 @@
     timeout: Number of seconds to wait
     keyvalues: (kay, value) pairs
   """
-  def filter_callbacks(event, keyvalues):
-    for keyvalue in keyvalues:
-      key, value = keyvalue
-      if event['data'][key] != value:
-        return False
-    return True
 
-  prefix = ''
-  if hasattr(ad, 'pretty_name'):
-    prefix = '[%s] ' % ad.pretty_name
-  try:
-    event = ad.ed.wait_for_event(event_name, filter_callbacks, timeout,
-                                 keyvalues)
-    ad.log.info('%sReceived unwanted %s: %s', prefix, event_name, event['data'])
-    asserts.fail(event_name, extras=event)
-  except queue.Empty:
-    ad.log.info('%s%s (%s) not seen (as expected)', prefix, event_name,
-                keyvalues)
-    return
+    def filter_callbacks(event, keyvalues):
+        for keyvalue in keyvalues:
+            key, value = keyvalue
+            if event['data'][key] != value:
+                return False
+        return True
+
+    prefix = ''
+    if hasattr(ad, 'pretty_name'):
+        prefix = '[%s] ' % ad.pretty_name
+    try:
+        event = ad.ed.wait_for_event(event_name, filter_callbacks, timeout,
+                                     keyvalues)
+        ad.log.info('%sReceived unwanted %s: %s', prefix, event_name,
+                    event['data'])
+        asserts.fail(event_name, extras=event)
+    except queue.Empty:
+        ad.log.info('%s%s (%s) not seen (as expected)', prefix, event_name,
+                    keyvalues)
+        return
+
 
 def verify_no_more_events(ad, timeout=EVENT_TIMEOUT):
-  """Verify that there are no more events in the queue.
+    """Verify that there are no more events in the queue.
   """
-  prefix = ''
-  if hasattr(ad, 'pretty_name'):
-    prefix = '[%s] ' % ad.pretty_name
-  should_fail = False
-  try:
-    while True:
-      event = ad.ed.pop_events('.*', timeout, freq=0)
-      ad.log.info('%sQueue contains %s', prefix, event)
-      should_fail = True
-  except queue.Empty:
-    if should_fail:
-      asserts.fail('%sEvent queue not empty' % prefix)
-    ad.log.info('%sNo events in the queue (as expected)', prefix)
-    return
+    prefix = ''
+    if hasattr(ad, 'pretty_name'):
+        prefix = '[%s] ' % ad.pretty_name
+    should_fail = False
+    try:
+        while True:
+            event = ad.ed.pop_events('.*', timeout, freq=0)
+            ad.log.info('%sQueue contains %s', prefix, event)
+            should_fail = True
+    except queue.Empty:
+        if should_fail:
+            asserts.fail('%sEvent queue not empty' % prefix)
+        ad.log.info('%sNo events in the queue (as expected)', prefix)
+        return
 
 
 def encode_list(list_of_objects):
-  """Converts the list of strings or bytearrays to a list of b64 encoded
+    """Converts the list of strings or bytearrays to a list of b64 encoded
   bytearrays.
 
   A None object is treated as a zero-length bytearray.
@@ -172,32 +181,33 @@
     list_of_objects: A list of strings or bytearray objects
   Returns: A list of the same objects, converted to bytes and b64 encoded.
   """
-  encoded_list = []
-  for obj in list_of_objects:
-    if obj is None:
-      obj = bytes()
-    if isinstance(obj, str):
-      encoded_list.append(base64.b64encode(bytes(obj, 'utf-8')).decode('utf-8'))
-    else:
-      encoded_list.append(base64.b64encode(obj).decode('utf-8'))
-  return encoded_list
+    encoded_list = []
+    for obj in list_of_objects:
+        if obj is None:
+            obj = bytes()
+        if isinstance(obj, str):
+            encoded_list.append(
+                base64.b64encode(bytes(obj, 'utf-8')).decode('utf-8'))
+        else:
+            encoded_list.append(base64.b64encode(obj).decode('utf-8'))
+    return encoded_list
 
 
 def decode_list(list_of_b64_strings):
-  """Converts the list of b64 encoded strings to a list of bytearray.
+    """Converts the list of b64 encoded strings to a list of bytearray.
 
   Args:
     list_of_b64_strings: list of strings, each of which is b64 encoded array
   Returns: a list of bytearrays.
   """
-  decoded_list = []
-  for str in list_of_b64_strings:
-    decoded_list.append(base64.b64decode(str))
-  return decoded_list
+    decoded_list = []
+    for str in list_of_b64_strings:
+        decoded_list.append(base64.b64decode(str))
+    return decoded_list
 
 
 def construct_max_match_filter(max_size):
-  """Constructs a maximum size match filter that fits into the 'max_size' bytes.
+    """Constructs a maximum size match filter that fits into the 'max_size' bytes.
 
   Match filters are a set of LVs (Length, Value pairs) where L is 1 byte. The
   maximum size match filter will contain max_size/2 LVs with all Vs (except
@@ -207,19 +217,19 @@
     max_size: Maximum size of the match filter.
   Returns: an array of bytearrays.
   """
-  mf_list = []
-  num_lvs = max_size // 2
-  for i in range(num_lvs - 1):
-    mf_list.append(bytes([i]))
-  if (max_size % 2 == 0):
-    mf_list.append(bytes([255]))
-  else:
-    mf_list.append(bytes([254, 255]))
-  return mf_list
+    mf_list = []
+    num_lvs = max_size // 2
+    for i in range(num_lvs - 1):
+        mf_list.append(bytes([i]))
+    if (max_size % 2 == 0):
+        mf_list.append(bytes([255]))
+    else:
+        mf_list.append(bytes([254, 255]))
+    return mf_list
 
 
 def assert_equal_strings(first, second, msg=None, extras=None):
-  """Assert equality of the string operands - where None is treated as equal to
+    """Assert equality of the string operands - where None is treated as equal to
   an empty string (''), otherwise fail the test.
 
   Error message is "first != second" by default. Additional explanation can
@@ -231,35 +241,37 @@
       extras: An optional field for extra information to be included in
               test result.
   """
-  if first == None:
-    first = ''
-  if second == None:
-    second = ''
-  asserts.assert_equal(first, second, msg, extras)
+    if first == None:
+        first = ''
+    if second == None:
+        second = ''
+    asserts.assert_equal(first, second, msg, extras)
 
 
 def get_aware_capabilities(ad):
-  """Get the Wi-Fi Aware capabilities from the specified device. The
+    """Get the Wi-Fi Aware capabilities from the specified device. The
   capabilities are a dictionary keyed by aware_const.CAP_* keys.
 
   Args:
     ad: the Android device
   Returns: the capability dictionary.
   """
-  return json.loads(ad.adb.shell('cmd wifiaware state_mgr get_capabilities'))
+    return json.loads(ad.adb.shell('cmd wifiaware state_mgr get_capabilities'))
+
 
 def get_wifi_mac_address(ad):
-  """Get the Wi-Fi interface MAC address as a upper-case string of hex digits
+    """Get the Wi-Fi interface MAC address as a upper-case string of hex digits
   without any separators (e.g. ':').
 
   Args:
     ad: Device on which to run.
   """
-  return ad.droid.wifiGetConnectionInfo()['mac_address'].upper().replace(
-      ':', '')
+    return ad.droid.wifiGetConnectionInfo()['mac_address'].upper().replace(
+        ':', '')
+
 
 def validate_forbidden_callbacks(ad, limited_cb=None):
-  """Validate that the specified callbacks have not been called more then permitted.
+    """Validate that the specified callbacks have not been called more then permitted.
 
   In addition to the input configuration also validates that forbidden callbacks
   have never been called.
@@ -269,26 +281,27 @@
     limited_cb: Dictionary of CB_EV_* ids and maximum permitted calls (0
                 meaning never).
   """
-  cb_data = json.loads(ad.adb.shell('cmd wifiaware native_cb get_cb_count'))
+    cb_data = json.loads(ad.adb.shell('cmd wifiaware native_cb get_cb_count'))
 
-  if limited_cb is None:
-    limited_cb = {}
-  # add callbacks which should never be called
-  limited_cb[aconsts.CB_EV_MATCH_EXPIRED] = 0
+    if limited_cb is None:
+        limited_cb = {}
+    # add callbacks which should never be called
+    limited_cb[aconsts.CB_EV_MATCH_EXPIRED] = 0
 
-  fail = False
-  for cb_event in limited_cb.keys():
-    if cb_event in cb_data:
-      if cb_data[cb_event] > limited_cb[cb_event]:
-        fail = True
-        ad.log.info(
-            'Callback %s observed %d times: more then permitted %d times',
-            cb_event, cb_data[cb_event], limited_cb[cb_event])
+    fail = False
+    for cb_event in limited_cb.keys():
+        if cb_event in cb_data:
+            if cb_data[cb_event] > limited_cb[cb_event]:
+                fail = True
+                ad.log.info(
+                    'Callback %s observed %d times: more then permitted %d times',
+                    cb_event, cb_data[cb_event], limited_cb[cb_event])
 
-  asserts.assert_false(fail, 'Forbidden callbacks observed', extras=cb_data)
+    asserts.assert_false(fail, 'Forbidden callbacks observed', extras=cb_data)
+
 
 def extract_stats(ad, data, results, key_prefix, log_prefix):
-  """Extract statistics from the data, store in the results dictionary, and
+    """Extract statistics from the data, store in the results dictionary, and
   output to the info log.
 
   Args:
@@ -301,39 +314,41 @@
     include_data: If True includes the raw data in the dictionary,
                   otherwise just the stats.
   """
-  num_samples = len(data)
-  results['%snum_samples' % key_prefix] = num_samples
+    num_samples = len(data)
+    results['%snum_samples' % key_prefix] = num_samples
 
-  if not data:
-    return
+    if not data:
+        return
 
-  data_min = min(data)
-  data_max = max(data)
-  data_mean = statistics.mean(data)
-  data_cdf = extract_cdf(data)
-  data_cdf_decile = extract_cdf_decile(data_cdf)
+    data_min = min(data)
+    data_max = max(data)
+    data_mean = statistics.mean(data)
+    data_cdf = extract_cdf(data)
+    data_cdf_decile = extract_cdf_decile(data_cdf)
 
-  results['%smin' % key_prefix] = data_min
-  results['%smax' % key_prefix] = data_max
-  results['%smean' % key_prefix] = data_mean
-  results['%scdf' % key_prefix] = data_cdf
-  results['%scdf_decile' % key_prefix] = data_cdf_decile
-  results['%sraw_data' % key_prefix] = data
+    results['%smin' % key_prefix] = data_min
+    results['%smax' % key_prefix] = data_max
+    results['%smean' % key_prefix] = data_mean
+    results['%scdf' % key_prefix] = data_cdf
+    results['%scdf_decile' % key_prefix] = data_cdf_decile
+    results['%sraw_data' % key_prefix] = data
 
-  if num_samples > 1:
-    data_stdev = statistics.stdev(data)
-    results['%sstdev' % key_prefix] = data_stdev
-    ad.log.info(
-      '%s: num_samples=%d, min=%.2f, max=%.2f, mean=%.2f, stdev=%.2f, cdf_decile=%s',
-      log_prefix, num_samples, data_min, data_max, data_mean, data_stdev,
-      data_cdf_decile)
-  else:
-    ad.log.info(
-      '%s: num_samples=%d, min=%.2f, max=%.2f, mean=%.2f, cdf_decile=%s',
-      log_prefix, num_samples, data_min, data_max, data_mean, data_cdf_decile)
+    if num_samples > 1:
+        data_stdev = statistics.stdev(data)
+        results['%sstdev' % key_prefix] = data_stdev
+        ad.log.info(
+            '%s: num_samples=%d, min=%.2f, max=%.2f, mean=%.2f, stdev=%.2f, cdf_decile=%s',
+            log_prefix, num_samples, data_min, data_max, data_mean, data_stdev,
+            data_cdf_decile)
+    else:
+        ad.log.info(
+            '%s: num_samples=%d, min=%.2f, max=%.2f, mean=%.2f, cdf_decile=%s',
+            log_prefix, num_samples, data_min, data_max, data_mean,
+            data_cdf_decile)
+
 
 def extract_cdf_decile(cdf):
-  """Extracts the 10%, 20%, ..., 90% points from the CDF and returns their
+    """Extracts the 10%, 20%, ..., 90% points from the CDF and returns their
   value (a list of 9 values).
 
   Since CDF may not (will not) have exact x% value picks the value >= x%.
@@ -341,66 +356,68 @@
   Args:
     cdf: a list of 2 lists, the X and Y of the CDF.
   """
-  decades = []
-  next_decade = 10
-  for x, y in zip(cdf[0], cdf[1]):
-    while 100*y >= next_decade:
-      decades.append(x)
-      next_decade = next_decade + 10
-    if next_decade == 100:
-      break
-  return decades
+    decades = []
+    next_decade = 10
+    for x, y in zip(cdf[0], cdf[1]):
+        while 100 * y >= next_decade:
+            decades.append(x)
+            next_decade = next_decade + 10
+        if next_decade == 100:
+            break
+    return decades
+
 
 def extract_cdf(data):
-  """Calculates the Cumulative Distribution Function (CDF) of the data.
+    """Calculates the Cumulative Distribution Function (CDF) of the data.
 
   Args:
       data: A list containing data (does not have to be sorted).
 
   Returns: a list of 2 lists: the X and Y axis of the CDF.
   """
-  x = []
-  cdf = []
-  if not data:
+    x = []
+    cdf = []
+    if not data:
+        return (x, cdf)
+
+    all_values = sorted(data)
+    for val in all_values:
+        if not x:
+            x.append(val)
+            cdf.append(1)
+        else:
+            if x[-1] == val:
+                cdf[-1] += 1
+            else:
+                x.append(val)
+                cdf.append(cdf[-1] + 1)
+
+    scale = 1.0 / len(all_values)
+    for i in range(len(cdf)):
+        cdf[i] = cdf[i] * scale
+
     return (x, cdf)
 
-  all_values = sorted(data)
-  for val in all_values:
-    if not x:
-      x.append(val)
-      cdf.append(1)
-    else:
-      if x[-1] == val:
-        cdf[-1] += 1
-      else:
-        x.append(val)
-        cdf.append(cdf[-1] + 1)
-
-  scale = 1.0 / len(all_values)
-  for i in range(len(cdf)):
-    cdf[i] = cdf[i] * scale
-
-  return (x, cdf)
-
 
 def get_mac_addr(device, interface):
-  """Get the MAC address of the specified interface. Uses ifconfig and parses
+    """Get the MAC address of the specified interface. Uses ifconfig and parses
   its output. Normalizes string to remove ':' and upper case.
 
   Args:
     device: Device on which to query the interface MAC address.
     interface: Name of the interface for which to obtain the MAC address.
   """
-  out = device.adb.shell("ifconfig %s" % interface)
-  res = re.match(".* HWaddr (\S+).*", out , re.S)
-  asserts.assert_true(
-      res,
-      'Unable to obtain MAC address for interface %s' % interface,
-      extras=out)
-  return res.group(1).upper().replace(':', '')
+    out = device.adb.shell("ifconfig %s" % interface)
+    res = re.match(".* HWaddr (\S+).*", out, re.S)
+    asserts.assert_true(
+        res,
+        'Unable to obtain MAC address for interface %s' % interface,
+        extras=out)
+    return res.group(1).upper().replace(':', '')
+
 
 def get_ipv6_addr(device, interface):
-  """Get the IPv6 address of the specified interface. Uses ifconfig and parses
+    """Get the IPv6 address of the specified interface. Uses ifconfig and parses
   its output. Returns a None if the interface does not have an IPv6 address
   (indicating it is not UP).
 
@@ -408,29 +425,32 @@
     device: Device on which to query the interface IPv6 address.
     interface: Name of the interface for which to obtain the IPv6 address.
   """
-  out = device.adb.shell("ifconfig %s" % interface)
-  res = re.match(".*inet6 addr: (\S+)/.*", out , re.S)
-  if not res:
-    return None
-  return res.group(1)
+    out = device.adb.shell("ifconfig %s" % interface)
+    res = re.match(".*inet6 addr: (\S+)/.*", out, re.S)
+    if not res:
+        return None
+    return res.group(1)
+
 
 #########################################################
 # Aware primitives
 #########################################################
 
+
 def request_network(dut, ns):
-  """Request a Wi-Fi Aware network.
+    """Request a Wi-Fi Aware network.
 
   Args:
     dut: Device
     ns: Network specifier
   Returns: the request key
   """
-  network_req = {"TransportType": 5, "NetworkSpecifier": ns}
-  return dut.droid.connectivityRequestWifiAwareNetwork(network_req)
+    network_req = {"TransportType": 5, "NetworkSpecifier": ns}
+    return dut.droid.connectivityRequestWifiAwareNetwork(network_req)
+
 
 def get_network_specifier(dut, id, dev_type, peer_mac, sec):
-  """Create a network specifier for the device based on the security
+    """Create a network specifier for the device based on the security
   configuration.
 
   Args:
@@ -440,17 +460,18 @@
     peer_mac: the discovery MAC address of the peer
     sec: security configuration
   """
-  if sec is None:
+    if sec is None:
+        return dut.droid.wifiAwareCreateNetworkSpecifierOob(
+            id, dev_type, peer_mac)
+    if isinstance(sec, str):
+        return dut.droid.wifiAwareCreateNetworkSpecifierOob(
+            id, dev_type, peer_mac, sec)
     return dut.droid.wifiAwareCreateNetworkSpecifierOob(
-        id, dev_type, peer_mac)
-  if isinstance(sec, str):
-    return dut.droid.wifiAwareCreateNetworkSpecifierOob(
-        id, dev_type, peer_mac, sec)
-  return dut.droid.wifiAwareCreateNetworkSpecifierOob(
-      id, dev_type, peer_mac, None, sec)
+        id, dev_type, peer_mac, None, sec)
+
 
 def configure_power_setting(device, mode, name, value):
-  """Use the command-line API to configure the power setting
+    """Use the command-line API to configure the power setting
 
   Args:
     device: Device on which to perform configuration
@@ -458,11 +479,12 @@
     name: One of the power settings from 'wifiaware set-power'.
     value: An integer.
   """
-  device.adb.shell(
-    "cmd wifiaware native_api set-power %s %s %d" % (mode, name, value))
+    device.adb.shell(
+        "cmd wifiaware native_api set-power %s %s %d" % (mode, name, value))
+
 
 def configure_mac_random_interval(device, interval_sec):
-  """Use the command-line API to configure the MAC address randomization
+    """Use the command-line API to configure the MAC address randomization
   interval.
 
   Args:
@@ -470,11 +492,12 @@
     interval_sec: The MAC randomization interval in seconds. A value of 0
                   disables all randomization.
   """
-  device.adb.shell(
-    "cmd wifiaware native_api set mac_random_interval_sec %d" % interval_sec)
+    device.adb.shell("cmd wifiaware native_api set mac_random_interval_sec %d"
+                     % interval_sec)
+
 
 def configure_ndp_allow_any_override(device, override_api_check):
-  """Use the command-line API to configure whether an NDP Responder may be
+    """Use the command-line API to configure whether an NDP Responder may be
   configured to accept an NDP request from ANY peer.
 
   By default the target API level of the requesting app determines whether such
@@ -486,63 +509,69 @@
     override_api_check: True to allow a Responder to ANY configuration, False to
                         perform the API level check.
   """
-  device.adb.shell("cmd wifiaware state_mgr allow_ndp_any %s" % (
-    "true" if override_api_check else "false"))
+    device.adb.shell("cmd wifiaware state_mgr allow_ndp_any %s" %
+                     ("true" if override_api_check else "false"))
+
 
 def config_settings_high_power(device):
-  """Configure device's power settings values to high power mode -
+    """Configure device's power settings values to high power mode -
   whether device is in interactive or non-interactive modes"""
-  configure_power_setting(device, "default", "dw_24ghz",
-                          aconsts.POWER_DW_24_INTERACTIVE)
-  configure_power_setting(device, "default", "dw_5ghz",
-                          aconsts.POWER_DW_5_INTERACTIVE)
-  configure_power_setting(device, "default", "disc_beacon_interval_ms",
-                          aconsts.POWER_DISC_BEACON_INTERVAL_INTERACTIVE)
-  configure_power_setting(device, "default", "num_ss_in_discovery",
-                          aconsts.POWER_NUM_SS_IN_DISC_INTERACTIVE)
-  configure_power_setting(device, "default", "enable_dw_early_term",
-                          aconsts.POWER_ENABLE_DW_EARLY_TERM_INTERACTIVE)
+    configure_power_setting(device, "default", "dw_24ghz",
+                            aconsts.POWER_DW_24_INTERACTIVE)
+    configure_power_setting(device, "default", "dw_5ghz",
+                            aconsts.POWER_DW_5_INTERACTIVE)
+    configure_power_setting(device, "default", "disc_beacon_interval_ms",
+                            aconsts.POWER_DISC_BEACON_INTERVAL_INTERACTIVE)
+    configure_power_setting(device, "default", "num_ss_in_discovery",
+                            aconsts.POWER_NUM_SS_IN_DISC_INTERACTIVE)
+    configure_power_setting(device, "default", "enable_dw_early_term",
+                            aconsts.POWER_ENABLE_DW_EARLY_TERM_INTERACTIVE)
 
-  configure_power_setting(device, "inactive", "dw_24ghz",
-                          aconsts.POWER_DW_24_INTERACTIVE)
-  configure_power_setting(device, "inactive", "dw_5ghz",
-                          aconsts.POWER_DW_5_INTERACTIVE)
-  configure_power_setting(device, "inactive", "disc_beacon_interval_ms",
-                          aconsts.POWER_DISC_BEACON_INTERVAL_INTERACTIVE)
-  configure_power_setting(device, "inactive", "num_ss_in_discovery",
-                          aconsts.POWER_NUM_SS_IN_DISC_INTERACTIVE)
-  configure_power_setting(device, "inactive", "enable_dw_early_term",
-                          aconsts.POWER_ENABLE_DW_EARLY_TERM_INTERACTIVE)
+    configure_power_setting(device, "inactive", "dw_24ghz",
+                            aconsts.POWER_DW_24_INTERACTIVE)
+    configure_power_setting(device, "inactive", "dw_5ghz",
+                            aconsts.POWER_DW_5_INTERACTIVE)
+    configure_power_setting(device, "inactive", "disc_beacon_interval_ms",
+                            aconsts.POWER_DISC_BEACON_INTERVAL_INTERACTIVE)
+    configure_power_setting(device, "inactive", "num_ss_in_discovery",
+                            aconsts.POWER_NUM_SS_IN_DISC_INTERACTIVE)
+    configure_power_setting(device, "inactive", "enable_dw_early_term",
+                            aconsts.POWER_ENABLE_DW_EARLY_TERM_INTERACTIVE)
+
 
 def config_settings_low_power(device):
-  """Configure device's power settings values to low power mode - whether
+    """Configure device's power settings values to low power mode - whether
   device is in interactive or non-interactive modes"""
-  configure_power_setting(device, "default", "dw_24ghz",
-                          aconsts.POWER_DW_24_NON_INTERACTIVE)
-  configure_power_setting(device, "default", "dw_5ghz",
-                          aconsts.POWER_DW_5_NON_INTERACTIVE)
-  configure_power_setting(device, "default", "disc_beacon_interval_ms",
-                          aconsts.POWER_DISC_BEACON_INTERVAL_NON_INTERACTIVE)
-  configure_power_setting(device, "default", "num_ss_in_discovery",
-                          aconsts.POWER_NUM_SS_IN_DISC_NON_INTERACTIVE)
-  configure_power_setting(device, "default", "enable_dw_early_term",
-                          aconsts.POWER_ENABLE_DW_EARLY_TERM_NON_INTERACTIVE)
+    configure_power_setting(device, "default", "dw_24ghz",
+                            aconsts.POWER_DW_24_NON_INTERACTIVE)
+    configure_power_setting(device, "default", "dw_5ghz",
+                            aconsts.POWER_DW_5_NON_INTERACTIVE)
+    configure_power_setting(device, "default", "disc_beacon_interval_ms",
+                            aconsts.POWER_DISC_BEACON_INTERVAL_NON_INTERACTIVE)
+    configure_power_setting(device, "default", "num_ss_in_discovery",
+                            aconsts.POWER_NUM_SS_IN_DISC_NON_INTERACTIVE)
+    configure_power_setting(device, "default", "enable_dw_early_term",
+                            aconsts.POWER_ENABLE_DW_EARLY_TERM_NON_INTERACTIVE)
 
-  configure_power_setting(device, "inactive", "dw_24ghz",
-                          aconsts.POWER_DW_24_NON_INTERACTIVE)
-  configure_power_setting(device, "inactive", "dw_5ghz",
-                          aconsts.POWER_DW_5_NON_INTERACTIVE)
-  configure_power_setting(device, "inactive", "disc_beacon_interval_ms",
-                          aconsts.POWER_DISC_BEACON_INTERVAL_NON_INTERACTIVE)
-  configure_power_setting(device, "inactive", "num_ss_in_discovery",
-                          aconsts.POWER_NUM_SS_IN_DISC_NON_INTERACTIVE)
-  configure_power_setting(device, "inactive", "enable_dw_early_term",
-                          aconsts.POWER_ENABLE_DW_EARLY_TERM_NON_INTERACTIVE)
+    configure_power_setting(device, "inactive", "dw_24ghz",
+                            aconsts.POWER_DW_24_NON_INTERACTIVE)
+    configure_power_setting(device, "inactive", "dw_5ghz",
+                            aconsts.POWER_DW_5_NON_INTERACTIVE)
+    configure_power_setting(device, "inactive", "disc_beacon_interval_ms",
+                            aconsts.POWER_DISC_BEACON_INTERVAL_NON_INTERACTIVE)
+    configure_power_setting(device, "inactive", "num_ss_in_discovery",
+                            aconsts.POWER_NUM_SS_IN_DISC_NON_INTERACTIVE)
+    configure_power_setting(device, "inactive", "enable_dw_early_term",
+                            aconsts.POWER_ENABLE_DW_EARLY_TERM_NON_INTERACTIVE)
 
 
-def config_power_settings(device, dw_24ghz, dw_5ghz, disc_beacon_interval=None,
-    num_ss_in_disc=None, enable_dw_early_term=None):
-  """Configure device's discovery window (DW) values to the specified values -
+def config_power_settings(device,
+                          dw_24ghz,
+                          dw_5ghz,
+                          disc_beacon_interval=None,
+                          num_ss_in_disc=None,
+                          enable_dw_early_term=None):
+    """Configure device's discovery window (DW) values to the specified values -
   whether the device is in interactive or non-interactive mode.
 
   Args:
@@ -555,37 +584,38 @@
     enable_dw_early_term: If True then enable early termination of the DW. If
                           None then not set.
   """
-  configure_power_setting(device, "default", "dw_24ghz", dw_24ghz)
-  configure_power_setting(device, "default", "dw_5ghz", dw_5ghz)
-  configure_power_setting(device, "inactive", "dw_24ghz", dw_24ghz)
-  configure_power_setting(device, "inactive", "dw_5ghz", dw_5ghz)
+    configure_power_setting(device, "default", "dw_24ghz", dw_24ghz)
+    configure_power_setting(device, "default", "dw_5ghz", dw_5ghz)
+    configure_power_setting(device, "inactive", "dw_24ghz", dw_24ghz)
+    configure_power_setting(device, "inactive", "dw_5ghz", dw_5ghz)
 
-  if disc_beacon_interval is not None:
-    configure_power_setting(device, "default", "disc_beacon_interval_ms",
-                            disc_beacon_interval)
-    configure_power_setting(device, "inactive", "disc_beacon_interval_ms",
-                            disc_beacon_interval)
+    if disc_beacon_interval is not None:
+        configure_power_setting(device, "default", "disc_beacon_interval_ms",
+                                disc_beacon_interval)
+        configure_power_setting(device, "inactive", "disc_beacon_interval_ms",
+                                disc_beacon_interval)
 
-  if num_ss_in_disc is not None:
-    configure_power_setting(device, "default", "num_ss_in_discovery",
-                            num_ss_in_disc)
-    configure_power_setting(device, "inactive", "num_ss_in_discovery",
-                            num_ss_in_disc)
+    if num_ss_in_disc is not None:
+        configure_power_setting(device, "default", "num_ss_in_discovery",
+                                num_ss_in_disc)
+        configure_power_setting(device, "inactive", "num_ss_in_discovery",
+                                num_ss_in_disc)
 
-  if enable_dw_early_term is not None:
-    configure_power_setting(device, "default", "enable_dw_early_term",
-                            enable_dw_early_term)
-    configure_power_setting(device, "inactive", "enable_dw_early_term",
-                            enable_dw_early_term)
+    if enable_dw_early_term is not None:
+        configure_power_setting(device, "default", "enable_dw_early_term",
+                                enable_dw_early_term)
+        configure_power_setting(device, "inactive", "enable_dw_early_term",
+                                enable_dw_early_term)
+
 
 def create_discovery_config(service_name,
-                          d_type,
-                          ssi=None,
-                          match_filter=None,
-                          match_filter_list=None,
-                          ttl=0,
-                          term_cb_enable=True):
-  """Create a publish discovery configuration based on input parameters.
+                            d_type,
+                            ssi=None,
+                            match_filter=None,
+                            match_filter_list=None,
+                            ttl=0,
+                            term_cb_enable=True):
+    """Create a publish discovery configuration based on input parameters.
 
   Args:
     service_name: Service name - required
@@ -599,21 +629,22 @@
   Returns:
     publish discovery configuration object.
   """
-  config = {}
-  config[aconsts.DISCOVERY_KEY_SERVICE_NAME] = service_name
-  config[aconsts.DISCOVERY_KEY_DISCOVERY_TYPE] = d_type
-  if ssi is not None:
-    config[aconsts.DISCOVERY_KEY_SSI] = ssi
-  if match_filter is not None:
-    config[aconsts.DISCOVERY_KEY_MATCH_FILTER] = match_filter
-  if match_filter_list is not None:
-    config[aconsts.DISCOVERY_KEY_MATCH_FILTER_LIST] = match_filter_list
-  config[aconsts.DISCOVERY_KEY_TTL] = ttl
-  config[aconsts.DISCOVERY_KEY_TERM_CB_ENABLED] = term_cb_enable
-  return config
+    config = {}
+    config[aconsts.DISCOVERY_KEY_SERVICE_NAME] = service_name
+    config[aconsts.DISCOVERY_KEY_DISCOVERY_TYPE] = d_type
+    if ssi is not None:
+        config[aconsts.DISCOVERY_KEY_SSI] = ssi
+    if match_filter is not None:
+        config[aconsts.DISCOVERY_KEY_MATCH_FILTER] = match_filter
+    if match_filter_list is not None:
+        config[aconsts.DISCOVERY_KEY_MATCH_FILTER_LIST] = match_filter_list
+    config[aconsts.DISCOVERY_KEY_TTL] = ttl
+    config[aconsts.DISCOVERY_KEY_TERM_CB_ENABLED] = term_cb_enable
+    return config
+
 
 def add_ranging_to_pub(p_config, enable_ranging):
-  """Add ranging enabled configuration to a publish configuration (only relevant
+    """Add ranging enabled configuration to a publish configuration (only relevant
   for publish configuration).
 
   Args:
@@ -622,11 +653,12 @@
   Returns:
     The modified publish configuration.
   """
-  p_config[aconsts.DISCOVERY_KEY_RANGING_ENABLED] = enable_ranging
-  return p_config
+    p_config[aconsts.DISCOVERY_KEY_RANGING_ENABLED] = enable_ranging
+    return p_config
+
 
 def add_ranging_to_sub(s_config, min_distance_mm, max_distance_mm):
-  """Add ranging distance configuration to a subscribe configuration (only
+    """Add ranging distance configuration to a subscribe configuration (only
   relevant to a subscribe configuration).
 
   Args:
@@ -636,14 +668,15 @@
   Returns:
     The modified subscribe configuration.
   """
-  if min_distance_mm is not None:
-    s_config[aconsts.DISCOVERY_KEY_MIN_DISTANCE_MM] = min_distance_mm
-  if max_distance_mm is not None:
-    s_config[aconsts.DISCOVERY_KEY_MAX_DISTANCE_MM] = max_distance_mm
-  return s_config
+    if min_distance_mm is not None:
+        s_config[aconsts.DISCOVERY_KEY_MIN_DISTANCE_MM] = min_distance_mm
+    if max_distance_mm is not None:
+        s_config[aconsts.DISCOVERY_KEY_MAX_DISTANCE_MM] = max_distance_mm
+    return s_config
+
 
 def attach_with_identity(dut):
-  """Start an Aware session (attach) and wait for confirmation and identity
+    """Start an Aware session (attach) and wait for confirmation and identity
   information (mac address).
 
   Args:
@@ -652,12 +685,13 @@
     id: Aware session ID.
     mac: Discovery MAC address of this device.
   """
-  id = dut.droid.wifiAwareAttach(True)
-  wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
-  event = wait_for_event(dut, aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
-  mac = event["data"]["mac"]
+    id = dut.droid.wifiAwareAttach(True)
+    wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
+    event = wait_for_event(dut, aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
+    mac = event["data"]["mac"]
 
-  return id, mac
+    return id, mac
+
 
 def create_discovery_pair(p_dut,
                           s_dut,
@@ -665,7 +699,7 @@
                           s_config,
                           device_startup_offset,
                           msg_id=None):
-  """Creates a discovery session (publish and subscribe), and waits for
+    """Creates a discovery session (publish and subscribe), and waits for
   service discovery - at that point the sessions are connected and ready for
   further messaging of data-path setup.
 
@@ -688,55 +722,59 @@
     peer_id_on_pub: Peer ID of the Subscriber as seen on the Publisher. Only
                     included if |msg_id| is not None.
   """
-  p_dut.pretty_name = 'Publisher'
-  s_dut.pretty_name = 'Subscriber'
+    p_dut.pretty_name = 'Publisher'
+    s_dut.pretty_name = 'Subscriber'
 
-  # Publisher+Subscriber: attach and wait for confirmation
-  p_id = p_dut.droid.wifiAwareAttach()
-  wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
-  time.sleep(device_startup_offset)
-  s_id = s_dut.droid.wifiAwareAttach()
-  wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
+    # Publisher+Subscriber: attach and wait for confirmation
+    p_id = p_dut.droid.wifiAwareAttach()
+    wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
+    time.sleep(device_startup_offset)
+    s_id = s_dut.droid.wifiAwareAttach()
+    wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
 
-  # Publisher: start publish and wait for confirmation
-  p_disc_id = p_dut.droid.wifiAwarePublish(p_id, p_config)
-  wait_for_event(p_dut, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
+    # Publisher: start publish and wait for confirmation
+    p_disc_id = p_dut.droid.wifiAwarePublish(p_id, p_config)
+    wait_for_event(p_dut, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
 
-  # Subscriber: start subscribe and wait for confirmation
-  s_disc_id = s_dut.droid.wifiAwareSubscribe(s_id, s_config)
-  wait_for_event(s_dut, aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED)
+    # Subscriber: start subscribe and wait for confirmation
+    s_disc_id = s_dut.droid.wifiAwareSubscribe(s_id, s_config)
+    wait_for_event(s_dut, aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED)
 
-  # Subscriber: wait for service discovery
-  discovery_event = wait_for_event(s_dut,
-                                   aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
-  peer_id_on_sub = discovery_event['data'][aconsts.SESSION_CB_KEY_PEER_ID]
+    # Subscriber: wait for service discovery
+    discovery_event = wait_for_event(s_dut,
+                                     aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
+    peer_id_on_sub = discovery_event['data'][aconsts.SESSION_CB_KEY_PEER_ID]
 
-  # Optionally send a message from Subscriber to Publisher
-  if msg_id is not None:
-    ping_msg = 'PING'
+    # Optionally send a message from Subscriber to Publisher
+    if msg_id is not None:
+        ping_msg = 'PING'
 
-    # Subscriber: send message to peer (Publisher)
-    s_dut.droid.wifiAwareSendMessage(s_disc_id, peer_id_on_sub, msg_id,
-                                     ping_msg, aconsts.MAX_TX_RETRIES)
-    sub_tx_msg_event = wait_for_event(s_dut, aconsts.SESSION_CB_ON_MESSAGE_SENT)
-    asserts.assert_equal(
-        msg_id, sub_tx_msg_event['data'][aconsts.SESSION_CB_KEY_MESSAGE_ID],
-        'Subscriber -> Publisher message ID corrupted')
+        # Subscriber: send message to peer (Publisher)
+        s_dut.droid.wifiAwareSendMessage(s_disc_id, peer_id_on_sub, msg_id,
+                                         ping_msg, aconsts.MAX_TX_RETRIES)
+        sub_tx_msg_event = wait_for_event(s_dut,
+                                          aconsts.SESSION_CB_ON_MESSAGE_SENT)
+        asserts.assert_equal(
+            msg_id,
+            sub_tx_msg_event['data'][aconsts.SESSION_CB_KEY_MESSAGE_ID],
+            'Subscriber -> Publisher message ID corrupted')
 
-    # Publisher: wait for received message
-    pub_rx_msg_event = wait_for_event(p_dut,
-                                      aconsts.SESSION_CB_ON_MESSAGE_RECEIVED)
-    peer_id_on_pub = pub_rx_msg_event['data'][aconsts.SESSION_CB_KEY_PEER_ID]
-    asserts.assert_equal(
-        ping_msg,
-        pub_rx_msg_event['data'][aconsts.SESSION_CB_KEY_MESSAGE_AS_STRING],
-        'Subscriber -> Publisher message corrupted')
-    return p_id, s_id, p_disc_id, s_disc_id, peer_id_on_sub, peer_id_on_pub
+        # Publisher: wait for received message
+        pub_rx_msg_event = wait_for_event(
+            p_dut, aconsts.SESSION_CB_ON_MESSAGE_RECEIVED)
+        peer_id_on_pub = pub_rx_msg_event['data'][
+            aconsts.SESSION_CB_KEY_PEER_ID]
+        asserts.assert_equal(
+            ping_msg,
+            pub_rx_msg_event['data'][aconsts.SESSION_CB_KEY_MESSAGE_AS_STRING],
+            'Subscriber -> Publisher message corrupted')
+        return p_id, s_id, p_disc_id, s_disc_id, peer_id_on_sub, peer_id_on_pub
 
-  return p_id, s_id, p_disc_id, s_disc_id, peer_id_on_sub
+    return p_id, s_id, p_disc_id, s_disc_id, peer_id_on_sub
+
 
 def create_ib_ndp(p_dut, s_dut, p_config, s_config, device_startup_offset):
-  """Create an NDP (using in-band discovery)
+    """Create an NDP (using in-band discovery)
 
   Args:
     p_dut: Device to use as publisher.
@@ -746,45 +784,48 @@
     device_startup_offset: Number of seconds to offset the enabling of NAN on
                            the two devices.
   """
-  (p_id, s_id, p_disc_id, s_disc_id, peer_id_on_sub,
-   peer_id_on_pub) = create_discovery_pair(
-       p_dut, s_dut, p_config, s_config, device_startup_offset, msg_id=9999)
+    (p_id, s_id, p_disc_id, s_disc_id, peer_id_on_sub,
+     peer_id_on_pub) = create_discovery_pair(
+         p_dut, s_dut, p_config, s_config, device_startup_offset, msg_id=9999)
 
-  # Publisher: request network
-  p_req_key = request_network(p_dut,
-                              p_dut.droid.wifiAwareCreateNetworkSpecifier(
-                                  p_disc_id, peer_id_on_pub, None))
+    # Publisher: request network
+    p_req_key = request_network(
+        p_dut,
+        p_dut.droid.wifiAwareCreateNetworkSpecifier(p_disc_id, peer_id_on_pub,
+                                                    None))
 
-  # Subscriber: request network
-  s_req_key = request_network(s_dut,
-                              s_dut.droid.wifiAwareCreateNetworkSpecifier(
-                                  s_disc_id, peer_id_on_sub, None))
+    # Subscriber: request network
+    s_req_key = request_network(
+        s_dut,
+        s_dut.droid.wifiAwareCreateNetworkSpecifier(s_disc_id, peer_id_on_sub,
+                                                    None))
 
-  # Publisher & Subscriber: wait for network formation
-  p_net_event = wait_for_event_with_keys(
-      p_dut, cconsts.EVENT_NETWORK_CALLBACK, EVENT_TIMEOUT,
-      (cconsts.NETWORK_CB_KEY_EVENT,
-       cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED), (cconsts.NETWORK_CB_KEY_ID,
-                                                     p_req_key))
-  s_net_event = wait_for_event_with_keys(
-      s_dut, cconsts.EVENT_NETWORK_CALLBACK, EVENT_TIMEOUT,
-      (cconsts.NETWORK_CB_KEY_EVENT,
-       cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED), (cconsts.NETWORK_CB_KEY_ID,
-                                                     s_req_key))
+    # Publisher & Subscriber: wait for network formation
+    p_net_event = wait_for_event_with_keys(
+        p_dut, cconsts.EVENT_NETWORK_CALLBACK, EVENT_TIMEOUT,
+        (cconsts.NETWORK_CB_KEY_EVENT,
+         cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+        (cconsts.NETWORK_CB_KEY_ID, p_req_key))
+    s_net_event = wait_for_event_with_keys(
+        s_dut, cconsts.EVENT_NETWORK_CALLBACK, EVENT_TIMEOUT,
+        (cconsts.NETWORK_CB_KEY_EVENT,
+         cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+        (cconsts.NETWORK_CB_KEY_ID, s_req_key))
 
-  p_aware_if = p_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
-  s_aware_if = s_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+    p_aware_if = p_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+    s_aware_if = s_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
 
-  p_ipv6 = p_dut.droid.connectivityGetLinkLocalIpv6Address(p_aware_if).split(
-      "%")[0]
-  s_ipv6 = s_dut.droid.connectivityGetLinkLocalIpv6Address(s_aware_if).split(
-      "%")[0]
+    p_ipv6 = p_dut.droid.connectivityGetLinkLocalIpv6Address(p_aware_if).split(
+        "%")[0]
+    s_ipv6 = s_dut.droid.connectivityGetLinkLocalIpv6Address(s_aware_if).split(
+        "%")[0]
 
-  return p_req_key, s_req_key, p_aware_if, s_aware_if, p_ipv6, s_ipv6
+    return p_req_key, s_req_key, p_aware_if, s_aware_if, p_ipv6, s_ipv6
+
 
 def create_oob_ndp_on_sessions(init_dut, resp_dut, init_id, init_mac, resp_id,
                                resp_mac):
-  """Create an NDP on top of existing Aware sessions (using OOB discovery)
+    """Create an NDP on top of existing Aware sessions (using OOB discovery)
 
   Args:
     init_dut: Initiator device
@@ -801,71 +842,74 @@
     init_ipv6: Initiator IPv6 address
     resp_ipv6: Responder IPv6 address
   """
-  # Responder: request network
-  resp_req_key = request_network(
-      resp_dut,
-      resp_dut.droid.wifiAwareCreateNetworkSpecifierOob(
-          resp_id, aconsts.DATA_PATH_RESPONDER, init_mac, None))
+    # Responder: request network
+    resp_req_key = request_network(
+        resp_dut,
+        resp_dut.droid.wifiAwareCreateNetworkSpecifierOob(
+            resp_id, aconsts.DATA_PATH_RESPONDER, init_mac, None))
 
-  # Initiator: request network
-  init_req_key = request_network(
-      init_dut,
-      init_dut.droid.wifiAwareCreateNetworkSpecifierOob(
-          init_id, aconsts.DATA_PATH_INITIATOR, resp_mac, None))
+    # Initiator: request network
+    init_req_key = request_network(
+        init_dut,
+        init_dut.droid.wifiAwareCreateNetworkSpecifierOob(
+            init_id, aconsts.DATA_PATH_INITIATOR, resp_mac, None))
 
-  # Initiator & Responder: wait for network formation
-  init_net_event = wait_for_event_with_keys(
-      init_dut, cconsts.EVENT_NETWORK_CALLBACK, EVENT_TIMEOUT,
-      (cconsts.NETWORK_CB_KEY_EVENT,
-       cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED), (cconsts.NETWORK_CB_KEY_ID,
-                                                     init_req_key))
-  resp_net_event = wait_for_event_with_keys(
-      resp_dut, cconsts.EVENT_NETWORK_CALLBACK, EVENT_TIMEOUT,
-      (cconsts.NETWORK_CB_KEY_EVENT,
-       cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED), (cconsts.NETWORK_CB_KEY_ID,
-                                                     resp_req_key))
+    # Initiator & Responder: wait for network formation
+    init_net_event = wait_for_event_with_keys(
+        init_dut, cconsts.EVENT_NETWORK_CALLBACK, EVENT_TIMEOUT,
+        (cconsts.NETWORK_CB_KEY_EVENT,
+         cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+        (cconsts.NETWORK_CB_KEY_ID, init_req_key))
+    resp_net_event = wait_for_event_with_keys(
+        resp_dut, cconsts.EVENT_NETWORK_CALLBACK, EVENT_TIMEOUT,
+        (cconsts.NETWORK_CB_KEY_EVENT,
+         cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+        (cconsts.NETWORK_CB_KEY_ID, resp_req_key))
 
-  init_aware_if = init_net_event['data'][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
-  resp_aware_if = resp_net_event['data'][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+    init_aware_if = init_net_event['data'][
+        cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+    resp_aware_if = resp_net_event['data'][
+        cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
 
-  init_ipv6 = init_dut.droid.connectivityGetLinkLocalIpv6Address(
-      init_aware_if).split('%')[0]
-  resp_ipv6 = resp_dut.droid.connectivityGetLinkLocalIpv6Address(
-      resp_aware_if).split('%')[0]
+    init_ipv6 = init_dut.droid.connectivityGetLinkLocalIpv6Address(
+        init_aware_if).split('%')[0]
+    resp_ipv6 = resp_dut.droid.connectivityGetLinkLocalIpv6Address(
+        resp_aware_if).split('%')[0]
 
-  return (init_req_key, resp_req_key, init_aware_if, resp_aware_if, init_ipv6,
-          resp_ipv6)
+    return (init_req_key, resp_req_key, init_aware_if, resp_aware_if,
+            init_ipv6, resp_ipv6)
+
 
 def create_oob_ndp(init_dut, resp_dut):
-  """Create an NDP (using OOB discovery)
+    """Create an NDP (using OOB discovery)
 
   Args:
     init_dut: Initiator device
     resp_dut: Responder device
   """
-  init_dut.pretty_name = 'Initiator'
-  resp_dut.pretty_name = 'Responder'
+    init_dut.pretty_name = 'Initiator'
+    resp_dut.pretty_name = 'Responder'
 
-  # Initiator+Responder: attach and wait for confirmation & identity
-  init_id = init_dut.droid.wifiAwareAttach(True)
-  wait_for_event(init_dut, aconsts.EVENT_CB_ON_ATTACHED)
-  init_ident_event = wait_for_event(init_dut,
-                                    aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
-  init_mac = init_ident_event['data']['mac']
-  resp_id = resp_dut.droid.wifiAwareAttach(True)
-  wait_for_event(resp_dut, aconsts.EVENT_CB_ON_ATTACHED)
-  resp_ident_event = wait_for_event(resp_dut,
-                                    aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
-  resp_mac = resp_ident_event['data']['mac']
+    # Initiator+Responder: attach and wait for confirmation & identity
+    init_id = init_dut.droid.wifiAwareAttach(True)
+    wait_for_event(init_dut, aconsts.EVENT_CB_ON_ATTACHED)
+    init_ident_event = wait_for_event(init_dut,
+                                      aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
+    init_mac = init_ident_event['data']['mac']
+    resp_id = resp_dut.droid.wifiAwareAttach(True)
+    wait_for_event(resp_dut, aconsts.EVENT_CB_ON_ATTACHED)
+    resp_ident_event = wait_for_event(resp_dut,
+                                      aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
+    resp_mac = resp_ident_event['data']['mac']
 
-  # wait for for devices to synchronize with each other - there are no other
-  # mechanisms to make sure this happens for OOB discovery (except retrying
-  # to execute the data-path request)
-  time.sleep(WAIT_FOR_CLUSTER)
+    # wait for devices to synchronize with each other - there are no other
+    # mechanisms to make sure this happens for OOB discovery (except retrying
+    # to execute the data-path request)
+    time.sleep(WAIT_FOR_CLUSTER)
 
-  (init_req_key, resp_req_key, init_aware_if,
-   resp_aware_if, init_ipv6, resp_ipv6) = create_oob_ndp_on_sessions(
-       init_dut, resp_dut, init_id, init_mac, resp_id, resp_mac)
+    (init_req_key, resp_req_key, init_aware_if, resp_aware_if,
+     init_ipv6, resp_ipv6) = create_oob_ndp_on_sessions(
+         init_dut, resp_dut, init_id, init_mac, resp_id, resp_mac)
 
-  return (init_req_key, resp_req_key, init_aware_if, resp_aware_if, init_ipv6,
-          resp_ipv6)
+    return (init_req_key, resp_req_key, init_aware_if, resp_aware_if,
+            init_ipv6, resp_ipv6)
diff --git a/acts/framework/acts/test_utils/wifi/rtt/RttBaseTest.py b/acts/framework/acts/test_utils/wifi/rtt/RttBaseTest.py
index 2182780..fb6db20 100644
--- a/acts/framework/acts/test_utils/wifi/rtt/RttBaseTest.py
+++ b/acts/framework/acts/test_utils/wifi/rtt/RttBaseTest.py
@@ -23,48 +23,47 @@
 
 
 class RttBaseTest(BaseTestClass):
+    def __init__(self, controllers):
+        super(RttBaseTest, self).__init__(controllers)
 
-  def __init__(self, controllers):
-    super(RttBaseTest, self).__init__(controllers)
+    def setup_test(self):
+        required_params = ("lci_reference", "lcr_reference",
+                           "rtt_reference_distance_mm",
+                           "stress_test_min_iteration_count",
+                           "stress_test_target_run_time_sec")
+        self.unpack_userparams(required_params)
 
-  def setup_test(self):
-    required_params = ("lci_reference", "lcr_reference",
-                       "rtt_reference_distance_mm",
-                       "stress_test_min_iteration_count",
-                       "stress_test_target_run_time_sec")
-    self.unpack_userparams(required_params)
+        # can be moved to JSON config file
+        self.rtt_reference_distance_margin_mm = 1000
+        self.rtt_max_failure_rate_two_sided_rtt_percentage = 10
+        self.rtt_max_failure_rate_one_sided_rtt_percentage = 50
+        self.rtt_max_margin_exceeded_rate_two_sided_rtt_percentage = 10
+        self.rtt_max_margin_exceeded_rate_one_sided_rtt_percentage = 50
+        self.rtt_min_expected_rssi_dbm = -100
 
-    # can be moved to JSON config file
-    self.rtt_reference_distance_margin_mm = 1000
-    self.rtt_max_failure_rate_two_sided_rtt_percentage = 10
-    self.rtt_max_failure_rate_one_sided_rtt_percentage = 50
-    self.rtt_max_margin_exceeded_rate_two_sided_rtt_percentage = 10
-    self.rtt_max_margin_exceeded_rate_one_sided_rtt_percentage = 50
-    self.rtt_min_expected_rssi_dbm = -100
+        for ad in self.android_devices:
+            utils.set_location_service(ad, True)
+            asserts.skip_if(
+                not ad.droid.doesDeviceSupportWifiRttFeature(),
+                "Device under test does not support Wi-Fi RTT - skipping test")
+            wutils.wifi_toggle_state(ad, True)
+            rtt_avail = ad.droid.wifiIsRttAvailable()
+            if not rtt_avail:
+                self.log.info('RTT not available. Waiting ...')
+                rutils.wait_for_event(ad, rconsts.BROADCAST_WIFI_RTT_AVAILABLE)
+            ad.ed.clear_all_events()
+            rutils.config_privilege_override(ad, False)
+            ad.droid.wifiSetCountryCode(wutils.WifiEnums.CountryCode.US)
 
-    for ad in self.android_devices:
-      utils.set_location_service(ad, True)
-      asserts.skip_if(
-          not ad.droid.doesDeviceSupportWifiRttFeature(),
-          "Device under test does not support Wi-Fi RTT - skipping test")
-      wutils.wifi_toggle_state(ad, True)
-      rtt_avail = ad.droid.wifiIsRttAvailable()
-      if not rtt_avail:
-          self.log.info('RTT not available. Waiting ...')
-          rutils.wait_for_event(ad, rconsts.BROADCAST_WIFI_RTT_AVAILABLE)
-      ad.ed.clear_all_events()
-      rutils.config_privilege_override(ad, False)
-      ad.droid.wifiSetCountryCode(wutils.WifiEnums.CountryCode.US)
+    def teardown_test(self):
+        for ad in self.android_devices:
+            if not ad.droid.doesDeviceSupportWifiRttFeature():
+                return
 
-  def teardown_test(self):
-    for ad in self.android_devices:
-      if not ad.droid.doesDeviceSupportWifiRttFeature():
-        return
+            # clean-up queue from the System Service UID
+            ad.droid.wifiRttCancelRanging([1000])
 
-      # clean-up queue from the System Service UID
-      ad.droid.wifiRttCancelRanging([1000])
-
-  def on_fail(self, test_name, begin_time):
-    for ad in self.android_devices:
-      ad.take_bug_report(test_name, begin_time)
-      ad.cat_adb_log(test_name, begin_time)
+    def on_fail(self, test_name, begin_time):
+        for ad in self.android_devices:
+            ad.take_bug_report(test_name, begin_time)
+            ad.cat_adb_log(test_name, begin_time)
diff --git a/acts/framework/acts/test_utils/wifi/rtt/rtt_test_utils.py b/acts/framework/acts/test_utils/wifi/rtt/rtt_test_utils.py
index c24b406..627bb75 100644
--- a/acts/framework/acts/test_utils/wifi/rtt/rtt_test_utils.py
+++ b/acts/framework/acts/test_utils/wifi/rtt/rtt_test_utils.py
@@ -27,11 +27,11 @@
 
 
 def decorate_event(event_name, id):
-  return '%s_%d' % (event_name, id)
+    return '%s_%d' % (event_name, id)
 
 
 def wait_for_event(ad, event_name, timeout=EVENT_TIMEOUT):
-  """Wait for the specified event or timeout.
+    """Wait for the specified event or timeout.
 
   Args:
     ad: The android device
@@ -40,39 +40,41 @@
   Returns:
     The event (if available)
   """
-  prefix = ''
-  if hasattr(ad, 'pretty_name'):
-    prefix = '[%s] ' % ad.pretty_name
-  try:
-    event = ad.ed.pop_event(event_name, timeout)
-    ad.log.info('%s%s: %s', prefix, event_name, event['data'])
-    return event
-  except queue.Empty:
-    ad.log.info('%sTimed out while waiting for %s', prefix, event_name)
-    asserts.fail(event_name)
+    prefix = ''
+    if hasattr(ad, 'pretty_name'):
+        prefix = '[%s] ' % ad.pretty_name
+    try:
+        event = ad.ed.pop_event(event_name, timeout)
+        ad.log.info('%s%s: %s', prefix, event_name, event['data'])
+        return event
+    except queue.Empty:
+        ad.log.info('%sTimed out while waiting for %s', prefix, event_name)
+        asserts.fail(event_name)
+
 
 def fail_on_event(ad, event_name, timeout=EVENT_TIMEOUT):
-  """Wait for a timeout period and looks for the specified event - fails if it
+    """Wait for a timeout period and looks for the specified event - fails if it
   is observed.
 
   Args:
     ad: The android device
     event_name: The event to wait for (and fail on its appearance)
   """
-  prefix = ''
-  if hasattr(ad, 'pretty_name'):
-    prefix = '[%s] ' % ad.pretty_name
-  try:
-    event = ad.ed.pop_event(event_name, timeout)
-    ad.log.info('%sReceived unwanted %s: %s', prefix, event_name, event['data'])
-    asserts.fail(event_name, extras=event)
-  except queue.Empty:
-    ad.log.info('%s%s not seen (as expected)', prefix, event_name)
-    return
+    prefix = ''
+    if hasattr(ad, 'pretty_name'):
+        prefix = '[%s] ' % ad.pretty_name
+    try:
+        event = ad.ed.pop_event(event_name, timeout)
+        ad.log.info('%sReceived unwanted %s: %s', prefix, event_name,
+                    event['data'])
+        asserts.fail(event_name, extras=event)
+    except queue.Empty:
+        ad.log.info('%s%s not seen (as expected)', prefix, event_name)
+        return
 
 
 def config_privilege_override(dut, override_to_no_privilege):
-  """Configure the device to override the permission check and to disallow any
+    """Configure the device to override the permission check and to disallow any
   privileged RTT operations, e.g. disallow one-sided RTT to Responders (APs)
   which do not support IEEE 802.11mc.
 
@@ -81,12 +83,12 @@
     override_to_no_privilege: True to indicate no privileged ops, False for
                               default (which will allow privileged ops).
   """
-  dut.adb.shell("cmd wifirtt set override_assume_no_privilege %d" % (
-    1 if override_to_no_privilege else 0))
+    dut.adb.shell("cmd wifirtt set override_assume_no_privilege %d" %
+                  (1 if override_to_no_privilege else 0))
 
 
 def get_rtt_constrained_results(scanned_networks, support_rtt):
-  """Filter the input list and only return those networks which either support
+    """Filter the input list and only return those networks which either support
   or do not support RTT (IEEE 802.11mc.)
 
   Args:
@@ -96,34 +98,34 @@
 
   Returns: a sub-set of the scanned_networks per support_rtt constraint.
   """
-  matching_networks = []
-  for network in scanned_networks:
-    if support_rtt:
-      if (rconsts.SCAN_RESULT_KEY_RTT_RESPONDER in network and
-          network[rconsts.SCAN_RESULT_KEY_RTT_RESPONDER]):
-        matching_networks.append(network)
-    else:
-      if (rconsts.SCAN_RESULT_KEY_RTT_RESPONDER not in network or
-            not network[rconsts.SCAN_RESULT_KEY_RTT_RESPONDER]):
-        matching_networks.append(network)
+    matching_networks = []
+    for network in scanned_networks:
+        if support_rtt:
+            if (rconsts.SCAN_RESULT_KEY_RTT_RESPONDER in network
+                    and network[rconsts.SCAN_RESULT_KEY_RTT_RESPONDER]):
+                matching_networks.append(network)
+        else:
+            if (rconsts.SCAN_RESULT_KEY_RTT_RESPONDER not in network
+                    or not network[rconsts.SCAN_RESULT_KEY_RTT_RESPONDER]):
+                matching_networks.append(network)
 
-  return matching_networks
+    return matching_networks
 
 
 def scan_networks(dut):
-  """Perform a scan and return scan results.
+    """Perform a scan and return scan results.
 
   Args:
     dut: Device under test.
 
   Returns: an array of scan results.
   """
-  wutils.start_wifi_connection_scan(dut)
-  return dut.droid.wifiGetScanResults()
+    wutils.start_wifi_connection_scan(dut)
+    return dut.droid.wifiGetScanResults()
 
 
 def scan_with_rtt_support_constraint(dut, support_rtt, repeat=0):
-  """Perform a scan and return scan results of APs: only those that support or
+    """Perform a scan and return scan results of APs: only those that support or
   do not support RTT (IEEE 802.11mc) - per the support_rtt parameter.
 
   Args:
@@ -134,17 +136,17 @@
 
   Returns: an array of scan results.
   """
-  for i in range(repeat + 1):
-    scan_results = scan_networks(dut)
-    aps = get_rtt_constrained_results(scan_results, support_rtt)
-    if len(aps) != 0:
-      return aps
+    for i in range(repeat + 1):
+        scan_results = scan_networks(dut)
+        aps = get_rtt_constrained_results(scan_results, support_rtt)
+        if len(aps) != 0:
+            return aps
 
-  return []
+    return []
 
 
 def select_best_scan_results(scans, select_count, lowest_rssi=-80):
-  """Select the strongest 'select_count' scans in the input list based on
+    """Select the strongest 'select_count' scans in the input list based on
   highest RSSI. Exclude all very weak signals, even if results in a shorter
   list.
 
@@ -155,23 +157,24 @@
   Returns: a list of the strongest 'select_count' scan results from the scans
            list.
   """
-  def takeRssi(element):
-    return element['level']
 
-  result = []
-  scans.sort(key=takeRssi, reverse=True)
-  for scan in scans:
-    if len(result) == select_count:
-      break
-    if scan['level'] < lowest_rssi:
-      break # rest are lower since we're sorted
-    result.append(scan)
+    def takeRssi(element):
+        return element['level']
 
-  return result
+    result = []
+    scans.sort(key=takeRssi, reverse=True)
+    for scan in scans:
+        if len(result) == select_count:
+            break
+        if scan['level'] < lowest_rssi:
+            break  # rest are lower since we're sorted
+        result.append(scan)
+
+    return result
 
 
 def validate_ap_result(scan_result, range_result):
-  """Validate the range results:
+    """Validate the range results:
   - Successful if AP (per scan result) support 802.11mc (allowed to fail
     otherwise)
   - MAC of result matches the BSSID
@@ -180,17 +183,20 @@
     scan_result: Scan result for the AP
     range_result: Range result returned by the RTT API
   """
-  asserts.assert_equal(scan_result[wutils.WifiEnums.BSSID_KEY], range_result[
-    rconsts.EVENT_CB_RANGING_KEY_MAC_AS_STRING_BSSID], 'MAC/BSSID mismatch')
-  if (rconsts.SCAN_RESULT_KEY_RTT_RESPONDER in scan_result and
-      scan_result[rconsts.SCAN_RESULT_KEY_RTT_RESPONDER]):
-    asserts.assert_true(range_result[rconsts.EVENT_CB_RANGING_KEY_STATUS] ==
-                        rconsts.EVENT_CB_RANGING_STATUS_SUCCESS,
-                        'Ranging failed for an AP which supports 802.11mc!')
+    asserts.assert_equal(
+        scan_result[wutils.WifiEnums.BSSID_KEY],
+        range_result[rconsts.EVENT_CB_RANGING_KEY_MAC_AS_STRING_BSSID],
+        'MAC/BSSID mismatch')
+    if (rconsts.SCAN_RESULT_KEY_RTT_RESPONDER in scan_result
+            and scan_result[rconsts.SCAN_RESULT_KEY_RTT_RESPONDER]):
+        asserts.assert_true(
+            range_result[rconsts.EVENT_CB_RANGING_KEY_STATUS] ==
+            rconsts.EVENT_CB_RANGING_STATUS_SUCCESS,
+            'Ranging failed for an AP which supports 802.11mc!')
 
 
 def validate_ap_results(scan_results, range_results):
-  """Validate an array of ranging results against the scan results used to
+    """Validate an array of ranging results against the scan results used to
   trigger the range. The assumption is that the results are returned in the
   same order as the request (which were the scan results).
 
@@ -198,22 +204,21 @@
     scan_results: Scans results used to trigger the range request
     range_results: Range results returned by the RTT API
   """
-  asserts.assert_equal(
-      len(scan_results),
-      len(range_results),
-      'Mismatch in length of scan results and range results')
+    asserts.assert_equal(
+        len(scan_results), len(range_results),
+        'Mismatch in length of scan results and range results')
 
-  # sort first based on BSSID/MAC
-  scan_results.sort(key=lambda x: x[wutils.WifiEnums.BSSID_KEY])
-  range_results.sort(
-      key=lambda x: x[rconsts.EVENT_CB_RANGING_KEY_MAC_AS_STRING_BSSID])
+    # sort first based on BSSID/MAC
+    scan_results.sort(key=lambda x: x[wutils.WifiEnums.BSSID_KEY])
+    range_results.sort(
+        key=lambda x: x[rconsts.EVENT_CB_RANGING_KEY_MAC_AS_STRING_BSSID])
 
-  for i in range(len(scan_results)):
-    validate_ap_result(scan_results[i], range_results[i])
+    for i in range(len(scan_results)):
+        validate_ap_result(scan_results[i], range_results[i])
 
 
 def validate_aware_mac_result(range_result, mac, description):
-  """Validate the range result for an Aware peer specified with a MAC address:
+    """Validate the range result for an Aware peer specified with a MAC address:
   - Correct MAC address.
 
   The MAC addresses may contain ":" (which are ignored for the comparison) and
@@ -224,14 +229,14 @@
     mac: MAC address of the peer
     description: Additional content to print on failure
   """
-  mac1 = mac.replace(':', '').lower()
-  mac2 = range_result[rconsts.EVENT_CB_RANGING_KEY_MAC_AS_STRING].replace(':',
-                                                                  '').lower()
-  asserts.assert_equal(mac1, mac2,
-                       '%s: MAC mismatch' % description)
+    mac1 = mac.replace(':', '').lower()
+    mac2 = range_result[rconsts.EVENT_CB_RANGING_KEY_MAC_AS_STRING].replace(
+        ':', '').lower()
+    asserts.assert_equal(mac1, mac2, '%s: MAC mismatch' % description)
+
 
 def validate_aware_peer_id_result(range_result, peer_id, description):
-  """Validate the range result for An Aware peer specified with a Peer ID:
+    """Validate the range result for An Aware peer specified with a Peer ID:
   - Correct Peer ID
   - MAC address information not available
 
@@ -240,16 +245,21 @@
     peer_id: Peer ID of the peer
     description: Additional content to print on failure
   """
-  asserts.assert_equal(peer_id,
-                       range_result[rconsts.EVENT_CB_RANGING_KEY_PEER_ID],
-                       '%s: Peer Id mismatch' % description)
-  asserts.assert_false(rconsts.EVENT_CB_RANGING_KEY_MAC in range_result,
-                       '%s: MAC Address not empty!' % description)
+    asserts.assert_equal(peer_id,
+                         range_result[rconsts.EVENT_CB_RANGING_KEY_PEER_ID],
+                         '%s: Peer Id mismatch' % description)
+    asserts.assert_false(rconsts.EVENT_CB_RANGING_KEY_MAC in range_result,
+                         '%s: MAC Address not empty!' % description)
 
 
-def extract_stats(results, range_reference_mm, range_margin_mm, min_rssi,
-    reference_lci=[], reference_lcr=[], summary_only=False):
-  """Extract statistics from a list of RTT results. Returns a dictionary
+def extract_stats(results,
+                  range_reference_mm,
+                  range_margin_mm,
+                  min_rssi,
+                  reference_lci=[],
+                  reference_lcr=[],
+                  summary_only=False):
+    """Extract statistics from a list of RTT results. Returns a dictionary
    with results:
      - num_results (success or fails)
      - num_success_results
@@ -290,99 +300,103 @@
 
   Returns: A dictionary of stats.
   """
-  stats = {}
-  stats['num_results'] = 0
-  stats['num_success_results'] = 0
-  stats['num_no_results'] = 0
-  stats['num_failures'] = 0
-  stats['num_range_out_of_margin'] = 0
-  stats['num_invalid_rssi'] = 0
-  stats['any_lci_mismatch'] = False
-  stats['any_lcr_mismatch'] = False
-  stats['invalid_num_attempted'] = False
-  stats['invalid_num_successful'] = False
+    stats = {}
+    stats['num_results'] = 0
+    stats['num_success_results'] = 0
+    stats['num_no_results'] = 0
+    stats['num_failures'] = 0
+    stats['num_range_out_of_margin'] = 0
+    stats['num_invalid_rssi'] = 0
+    stats['any_lci_mismatch'] = False
+    stats['any_lcr_mismatch'] = False
+    stats['invalid_num_attempted'] = False
+    stats['invalid_num_successful'] = False
 
-  range_max_mm = range_reference_mm + range_margin_mm
-  range_min_mm = range_reference_mm - range_margin_mm
+    range_max_mm = range_reference_mm + range_margin_mm
+    range_min_mm = range_reference_mm - range_margin_mm
 
-  distances = []
-  distance_std_devs = []
-  rssis = []
-  num_attempted_measurements = []
-  num_successful_measurements = []
-  status_codes = []
-  lcis = []
-  lcrs = []
+    distances = []
+    distance_std_devs = []
+    rssis = []
+    num_attempted_measurements = []
+    num_successful_measurements = []
+    status_codes = []
+    lcis = []
+    lcrs = []
 
-  for i in range(len(results)):
-    result = results[i]
+    for i in range(len(results)):
+        result = results[i]
 
-    if result is None: # None -> timeout waiting for RTT result
-      stats['num_no_results'] = stats['num_no_results'] + 1
-      continue
-    stats['num_results'] = stats['num_results'] + 1
+        if result is None:  # None -> timeout waiting for RTT result
+            stats['num_no_results'] = stats['num_no_results'] + 1
+            continue
+        stats['num_results'] = stats['num_results'] + 1
 
-    status_codes.append(result[rconsts.EVENT_CB_RANGING_KEY_STATUS])
-    if status_codes[-1] != rconsts.EVENT_CB_RANGING_STATUS_SUCCESS:
-      stats['num_failures'] = stats['num_failures'] + 1
-      continue
-    stats['num_success_results'] = stats['num_success_results'] + 1
+        status_codes.append(result[rconsts.EVENT_CB_RANGING_KEY_STATUS])
+        if status_codes[-1] != rconsts.EVENT_CB_RANGING_STATUS_SUCCESS:
+            stats['num_failures'] = stats['num_failures'] + 1
+            continue
+        stats['num_success_results'] = stats['num_success_results'] + 1
 
-    distance_mm = result[rconsts.EVENT_CB_RANGING_KEY_DISTANCE_MM]
-    distances.append(distance_mm)
-    if not range_min_mm <= distance_mm <= range_max_mm:
-      stats['num_range_out_of_margin'] = stats['num_range_out_of_margin'] + 1
-    distance_std_devs.append(
-        result[rconsts.EVENT_CB_RANGING_KEY_DISTANCE_STD_DEV_MM])
+        distance_mm = result[rconsts.EVENT_CB_RANGING_KEY_DISTANCE_MM]
+        distances.append(distance_mm)
+        if not range_min_mm <= distance_mm <= range_max_mm:
+            stats[
+                'num_range_out_of_margin'] = stats['num_range_out_of_margin'] + 1
+        distance_std_devs.append(
+            result[rconsts.EVENT_CB_RANGING_KEY_DISTANCE_STD_DEV_MM])
 
-    rssi = result[rconsts.EVENT_CB_RANGING_KEY_RSSI]
-    rssis.append(rssi)
-    if not min_rssi <= rssi <= 0:
-      stats['num_invalid_rssi'] = stats['num_invalid_rssi'] + 1
+        rssi = result[rconsts.EVENT_CB_RANGING_KEY_RSSI]
+        rssis.append(rssi)
+        if not min_rssi <= rssi <= 0:
+            stats['num_invalid_rssi'] = stats['num_invalid_rssi'] + 1
 
-    num_attempted = result[
-      rconsts.EVENT_CB_RANGING_KEY_NUM_ATTEMPTED_MEASUREMENTS]
-    num_attempted_measurements.append(num_attempted)
-    if num_attempted == 0:
-      stats['invalid_num_attempted'] = True
+        num_attempted = result[
+            rconsts.EVENT_CB_RANGING_KEY_NUM_ATTEMPTED_MEASUREMENTS]
+        num_attempted_measurements.append(num_attempted)
+        if num_attempted == 0:
+            stats['invalid_num_attempted'] = True
 
-    num_successful = result[
-      rconsts.EVENT_CB_RANGING_KEY_NUM_SUCCESSFUL_MEASUREMENTS]
-    num_successful_measurements.append(num_successful)
-    if num_successful == 0:
-      stats['invalid_num_successful'] = True
+        num_successful = result[
+            rconsts.EVENT_CB_RANGING_KEY_NUM_SUCCESSFUL_MEASUREMENTS]
+        num_successful_measurements.append(num_successful)
+        if num_successful == 0:
+            stats['invalid_num_successful'] = True
 
-    lcis.append(result[rconsts.EVENT_CB_RANGING_KEY_LCI])
-    if (result[rconsts.EVENT_CB_RANGING_KEY_LCI] != reference_lci):
-      stats['any_lci_mismatch'] = True
-    lcrs.append(result[rconsts.EVENT_CB_RANGING_KEY_LCR])
-    if (result[rconsts.EVENT_CB_RANGING_KEY_LCR] != reference_lcr):
-      stats['any_lcr_mismatch'] = True
+        lcis.append(result[rconsts.EVENT_CB_RANGING_KEY_LCI])
+        if (result[rconsts.EVENT_CB_RANGING_KEY_LCI] != reference_lci):
+            stats['any_lci_mismatch'] = True
+        lcrs.append(result[rconsts.EVENT_CB_RANGING_KEY_LCR])
+        if (result[rconsts.EVENT_CB_RANGING_KEY_LCR] != reference_lcr):
+            stats['any_lcr_mismatch'] = True
 
-  if len(distances) > 0:
-    stats['distance_mean'] = statistics.mean(distances)
-  if len(distances) > 1:
-    stats['distance_std_dev'] = statistics.stdev(distances)
-  if len(rssis) > 0:
-    stats['rssi_mean'] = statistics.mean(rssis)
-  if len(rssis) > 1:
-    stats['rssi_std_dev'] = statistics.stdev(rssis)
-  if not summary_only:
-    stats['distances'] = distances
-    stats['distance_std_devs'] = distance_std_devs
-    stats['rssis'] = rssis
-    stats['num_attempted_measurements'] = num_attempted_measurements
-    stats['num_successful_measurements'] = num_successful_measurements
-    stats['status_codes'] = status_codes
-    stats['lcis'] = lcis
-    stats['lcrs'] = lcrs
+    if len(distances) > 0:
+        stats['distance_mean'] = statistics.mean(distances)
+    if len(distances) > 1:
+        stats['distance_std_dev'] = statistics.stdev(distances)
+    if len(rssis) > 0:
+        stats['rssi_mean'] = statistics.mean(rssis)
+    if len(rssis) > 1:
+        stats['rssi_std_dev'] = statistics.stdev(rssis)
+    if not summary_only:
+        stats['distances'] = distances
+        stats['distance_std_devs'] = distance_std_devs
+        stats['rssis'] = rssis
+        stats['num_attempted_measurements'] = num_attempted_measurements
+        stats['num_successful_measurements'] = num_successful_measurements
+        stats['status_codes'] = status_codes
+        stats['lcis'] = lcis
+        stats['lcrs'] = lcrs
 
-  return stats
+    return stats
 
 
-def run_ranging(dut, aps, iter_count, time_between_iterations,
-    target_run_time_sec=0):
-  """Executing ranging to the set of APs.
+def run_ranging(dut,
+                aps,
+                iter_count,
+                time_between_iterations,
+                target_run_time_sec=0):
+    """Executing ranging to the set of APs.
 
   Will execute a minimum of 'iter_count' iterations. Will continue to run
   until execution time (just) exceeds 'target_run_time_sec'.
@@ -397,54 +411,60 @@
   Returns: a list of the events containing the RTT results (or None for a
   failed measurement).
   """
-  max_peers = dut.droid.wifiRttMaxPeersInRequest()
+    max_peers = dut.droid.wifiRttMaxPeersInRequest()
 
-  asserts.assert_true(len(aps) > 0, "Need at least one AP!")
-  if len(aps) > max_peers:
-    aps = aps[0:max_peers]
+    asserts.assert_true(len(aps) > 0, "Need at least one AP!")
+    if len(aps) > max_peers:
+        aps = aps[0:max_peers]
 
-  events = {} # need to keep track per BSSID!
-  for ap in aps:
-    events[ap["BSSID"]] = []
+    events = {}  # need to keep track per BSSID!
+    for ap in aps:
+        events[ap["BSSID"]] = []
 
-  start_clock = time.time()
-  iterations_done = 0
-  run_time = 0
-  while iterations_done < iter_count or (
-      target_run_time_sec != 0 and run_time < target_run_time_sec):
-    if iterations_done != 0 and time_between_iterations != 0:
-      time.sleep(time_between_iterations)
+    start_clock = time.time()
+    iterations_done = 0
+    run_time = 0
+    while iterations_done < iter_count or (target_run_time_sec != 0
+                                           and run_time < target_run_time_sec):
+        if iterations_done != 0 and time_between_iterations != 0:
+            time.sleep(time_between_iterations)
 
-    id = dut.droid.wifiRttStartRangingToAccessPoints(aps)
-    try:
-      event = dut.ed.pop_event(
-        decorate_event(rconsts.EVENT_CB_RANGING_ON_RESULT, id), EVENT_TIMEOUT)
-      range_results = event["data"][rconsts.EVENT_CB_RANGING_KEY_RESULTS]
-      asserts.assert_equal(
-          len(aps),
-          len(range_results),
-          'Mismatch in length of scan results and range results')
-      for result in range_results:
-        bssid = result[rconsts.EVENT_CB_RANGING_KEY_MAC_AS_STRING]
-        asserts.assert_true(bssid in events,
-                            "Result BSSID %s not in requested AP!?" % bssid)
-        asserts.assert_equal(len(events[bssid]), iterations_done,
-                             "Duplicate results for BSSID %s!?" % bssid)
-        events[bssid].append(result)
-    except queue.Empty:
-      for ap in aps:
-        events[ap["BSSID"]].append(None)
+        id = dut.droid.wifiRttStartRangingToAccessPoints(aps)
+        try:
+            event = dut.ed.pop_event(
+                decorate_event(rconsts.EVENT_CB_RANGING_ON_RESULT, id),
+                EVENT_TIMEOUT)
+            range_results = event["data"][rconsts.EVENT_CB_RANGING_KEY_RESULTS]
+            asserts.assert_equal(
+                len(aps), len(range_results),
+                'Mismatch in length of scan results and range results')
+            for result in range_results:
+                bssid = result[rconsts.EVENT_CB_RANGING_KEY_MAC_AS_STRING]
+                asserts.assert_true(
+                    bssid in events,
+                    "Result BSSID %s not in requested AP!?" % bssid)
+                asserts.assert_equal(
+                    len(events[bssid]), iterations_done,
+                    "Duplicate results for BSSID %s!?" % bssid)
+                events[bssid].append(result)
+        except queue.Empty:
+            for ap in aps:
+                events[ap["BSSID"]].append(None)
 
-    iterations_done = iterations_done + 1
-    run_time = time.time() - start_clock
+        iterations_done = iterations_done + 1
+        run_time = time.time() - start_clock
 
-  return events
+    return events
 
 
-def analyze_results(all_aps_events, rtt_reference_distance_mm,
-    distance_margin_mm, min_expected_rssi, lci_reference, lcr_reference,
-    summary_only=False):
-  """Verifies the results of the RTT experiment.
+def analyze_results(all_aps_events,
+                    rtt_reference_distance_mm,
+                    distance_margin_mm,
+                    min_expected_rssi,
+                    lci_reference,
+                    lcr_reference,
+                    summary_only=False):
+    """Verifies the results of the RTT experiment.
 
   Args:
     all_aps_events: Dictionary of APs, each a list of RTT result events.
@@ -454,10 +474,10 @@
     lci_reference, lcr_reference: Expected LCI/LCR values (arrays of bytes).
     summary_only: Only include summary keys (reduce size).
   """
-  all_stats = {}
-  for bssid, events in all_aps_events.items():
-    stats = extract_stats(events, rtt_reference_distance_mm,
-                          distance_margin_mm, min_expected_rssi,
-                          lci_reference, lcr_reference, summary_only)
-    all_stats[bssid] = stats
-  return all_stats
+    all_stats = {}
+    for bssid, events in all_aps_events.items():
+        stats = extract_stats(events, rtt_reference_distance_mm,
+                              distance_margin_mm, min_expected_rssi,
+                              lci_reference, lcr_reference, summary_only)
+        all_stats[bssid] = stats
+    return all_stats
diff --git a/acts/framework/acts/tracelogger.py b/acts/framework/acts/tracelogger.py
index ef978f9..afdcb3f 100644
--- a/acts/framework/acts/tracelogger.py
+++ b/acts/framework/acts/tracelogger.py
@@ -14,53 +14,145 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from colorama import Fore, Back, Style, init
+import datetime
 import inspect
+import logging
 import os
+import xml.etree.cElementTree as et
 
 
-class TraceLogger():
# Message categories understood by TraceLogger.
#   'level'   - verbosity weight compared against TraceLogger.max_level.
#               NOTE(review): the ordering is inverted relative to the stdlib
#               logging module (here INFO=10 < DEBUG=20); confirm intended.
#   'enabled' - fallback on/off switch used when no logger level is set.
#   'style'   - optional colorama style prefix for console output.
TYPE = {
  'INFO': {'level': 10, 'enabled': True, 'style': None},
  'DEBUG': {'level': 20, 'enabled': True, 'style': Fore.GREEN + Style.BRIGHT},
  'WARNING': {'level': 30, 'enabled': True, 'style': Fore.YELLOW + Style.BRIGHT},
  'ERROR': {'level': 40, 'enabled': True, 'style': Fore.RED + Style.BRIGHT},
  'EXCEPTION': {'level': 0, 'enabled': True, 'style': Back.RED + Fore.WHITE + Style.BRIGHT},
  'CASE': {'level': 0, 'enabled': True, 'style': Back.BLUE + Fore.WHITE + Style.BRIGHT},
  'SUITE': {'level': 0, 'enabled': True, 'style': Back.MAGENTA + Fore.WHITE + Style.BRIGHT},
  'DEVICE': {'level': 50, 'enabled': True, 'style': Fore.CYAN + Style.BRIGHT},
  'STEP': {'level': 15, 'enabled': True, 'style': Fore.WHITE + Style.BRIGHT}}
+
+
class TraceLogger(object):
    """Logger wrapper that appends call-site info ('[file:function:line]') to
    every delegated message, and can pretty-print/record categorized messages.

    Fix: the original mixed 2-space and 4-space indentation inside methods;
    normalized to 4 spaces throughout (no behavior change).
    """

    def __init__(self, logger):
        self._logger = logger
        # In-memory XML document where _xml() records printed messages.
        self.root = et.Element('logger')
        self.cat = None
        self.max_level = 100
        self.type = TYPE
        # Single-letter convenience aliases.
        self.d = self.debug
        self.e = self.error
        self.i = self.info
        self.t = self.step
        self.w = self.warning

    @staticmethod
    def _get_trace_info(level=1, offset=2):
        """Return up to `level` caller frames as '[file:function:lineno]'
        fragments; `offset` skips the frames belonging to this wrapper."""
        # We want the stack frame above this and above the error/warning/info
        inspect_stack = inspect.stack()
        trace_info = ''
        for i in range(level):
            try:
                stack_frames = inspect_stack[offset + i]
                info = inspect.getframeinfo(stack_frames[0])
                trace_info = '%s[%s:%s:%s]' % (trace_info,
                                               os.path.basename(info.filename),
                                               info.function, info.lineno)
            except IndexError:
                # Fewer frames on the stack than requested; stop early.
                break
        return trace_info

    def _log_with(self, logging_lambda, trace_level, msg, *args, **kwargs):
        """Invoke `logging_lambda` with `msg` plus appended trace info."""
        trace_info = TraceLogger._get_trace_info(level=trace_level, offset=3)
        logging_lambda('%s %s' % (msg, trace_info), *args, **kwargs)

    def _check_verbosity(self, message_type):
        """Decide whether a message of `message_type` should be emitted.

        NOTE(review): `self.level` is never assigned here; the attribute
        lookup falls through __getattr__ to the wrapped logger's `level`.
        """
        if self.level:
            return self.max_level >= self.type[message_type]['level']
        else:
            return self.type[message_type]['enabled']

    def _xml(self, message_date, message_type, message_text):
        """Record the message in the XML tree under a lazily-created
        default 'general' category."""
        if self.cat is None:
            self.cat = et.SubElement(
                self.root, 'category', name='general', id='gen')
        message = et.SubElement(
            self.cat, 'message', name=message_type, date=str(message_date))
        message.text = str(message_text)

    def _print_message(self, message_type, message):
        """Print `message` line by line with a timestamp/type prefix, applying
        the category's optional colorama style; also records it via _xml()."""
        if self._check_verbosity(message_type):
            now = datetime.datetime.now()
            self._xml(now, message_type, message)
            style = self.type[message_type]['style']
            default_format = '{} [{}] '.format(now, message_type)
            if style:
                for line in str(message).split('\n'):
                    print('{}{} {}'.format(style, default_format, line))
            else:
                for line in str(message).split('\n'):
                    print('{} {}'.format(default_format, line))

    def exception(self, msg, *args, **kwargs):
        self._log_with(self._logger.exception, 5, msg, *args, **kwargs)

    def debug(self, msg, *args, **kwargs):
        self._log_with(self._logger.debug, 3, msg, *args, **kwargs)

    def error(self, msg, *args, **kwargs):
        self._log_with(self._logger.error, 3, msg, *args, **kwargs)

    def warn(self, msg, *args, **kwargs):
        self._log_with(self._logger.warn, 1, msg, *args, **kwargs)

    def warning(self, msg, *args, **kwargs):
        self._log_with(self._logger.warning, 1, msg, *args, **kwargs)

    def info(self, msg, *args, **kwargs):
        self._log_with(self._logger.info, 1, msg, *args, **kwargs)

    def step(self, message):
        """Print a STEP-category message to stdout (bypasses the wrapped
        logger)."""
        self._print_message(message_type='STEP', message=message)

    def __getattr__(self, name):
        # Delegate any attribute we do not define to the wrapped logger.
        return getattr(self._logger, name)
+
+
class TakoTraceLogger(TraceLogger):
    """TraceLogger variant that routes step/device/suite/case messages through
    the wrapped stdlib logger at named custom levels instead of printing."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # NOTE(review): TraceLogger.__init__ already binds these aliases and
        # bound-method lookup resolves to this class's overrides, so the
        # re-binding here is redundant but harmless.
        self.d = self.debug
        self.e = self.error
        self.i = self.info
        self.t = self.step
        self.w = self.warning

    def _logger_level(self, level_name):
        # Resolve the numeric value registered for `level_name` and return a
        # callable that logs at it.
        # NOTE(review): logging.getLevelName() returns a number only if the
        # name (e.g. 'STEP') was registered via logging.addLevelName —
        # confirm that registration happens elsewhere.
        level = logging.getLevelName(level_name)
        return lambda *args, **kwargs: self._logger.log(level, *args, **kwargs)

    def step(self, msg, *args, **kwargs):
        """Delegate a step call to the underlying logger."""
        self._log_with(self._logger_level('STEP'), 1, msg, *args, **kwargs)

    def device(self, msg, *args, **kwargs):
        """Delegate a device call to the underlying logger."""
        self._log_with(self._logger_level('DEVICE'), 1, msg, *args, **kwargs)

    def suite(self, msg, *args, **kwargs):
        """Delegate a suite call to the underlying logger."""
        self._log_with(self._logger_level('SUITE'), 1, msg, *args, **kwargs)

    def case(self, msg, *args, **kwargs):
        """Delegate a case call to the underlying logger."""
        self._log_with(self._logger_level('CASE'), 1, msg, *args, **kwargs)

    def flush_log(self):
        """This function exists for compatibility with Tako's logserial module.

        Note that flushing the log is handled automatically by python's logging
        module.
        """
        pass
diff --git a/acts/framework/acts/utils.py b/acts/framework/acts/utils.py
index 2c8bfe8..533d874 100755
--- a/acts/framework/acts/utils.py
+++ b/acts/framework/acts/utils.py
@@ -33,7 +33,6 @@
 from acts.controllers import adb
 from acts.libs.proc import job
 
-
 # File name length is limited to 255 chars on some OS, so we need to make sure
 # the file names we output fits within the limit.
 MAX_FILENAME_LEN = 255
@@ -415,7 +414,7 @@
                              " stdout: %s" % (proc.pid, ret, err, out))
 
 
-def start_standing_subprocess(cmd, check_health_delay=0):
+def start_standing_subprocess(cmd, check_health_delay=0, shell=True):
     """Starts a long-running subprocess.
 
     This is not a blocking call and the subprocess started by it should be
@@ -439,7 +438,7 @@
         cmd,
         stdout=subprocess.PIPE,
         stderr=subprocess.PIPE,
-        shell=True,
+        shell=shell,
         preexec_fn=os.setpgrp)
     logging.debug("Start standing subprocess with cmd: %s", cmd)
     if check_health_delay > 0:
@@ -945,3 +944,88 @@
     if pid:
         runtime = adb.shell('ps -o etime= -p "%s"' % pid)
     return runtime
+
+
def wait_until(func, timeout_s, condition=True, sleep_s=1.0):
    """Executes a function repeatedly until condition is met.

    Args:
      func: The function pointer to execute.
      timeout_s: Amount of time (in seconds) to wait before raising an
                 exception.
      condition: The ending condition of the WaitUntil loop.
      sleep_s: The amount of time (in seconds) to sleep between each function
               execution.

    Returns:
      The time in seconds before detecting a successful condition.

    Raises:
      TimeoutError: If the condition was never met and timeout is hit.
    """
    started = time.time()
    deadline = started + timeout_s
    attempts = 0
    while True:
        attempts += 1
        # Success: report how long we waited.
        if func() == condition:
            return time.time() - started
        # Give up once the deadline has passed; raise below.
        if time.time() > deadline:
            break
        time.sleep(sleep_s)
    raise TimeoutError('Failed to complete function %s in %d seconds having '
                       'attempted %d times.' % (str(func), timeout_s, attempts))
+
+
+# Adapted from
+# https://en.wikibooks.org/wiki/Algorithm_Implementation/Strings/Levenshtein_distance#Python
+# Available under the Creative Commons Attribution-ShareAlike License
def levenshtein(string1, string2):
    """Returns the Levenshtein distance of two strings.
    Uses Dynamic Programming approach, only keeping track of
    two rows of the DP table at a time.

    Args:
      string1: String to compare to string2
      string2: String to compare to string1

    Returns:
      distance: the Levenshtein distance between string1 and string2
    """
    # Keep the longer string first so the DP row tracks the shorter one.
    if len(string1) < len(string2):
        return levenshtein(string2, string1)

    if not string2:
        return len(string1)

    prev = list(range(len(string2) + 1))
    for row, ch1 in enumerate(string1, start=1):
        cur = [row]
        for col, ch2 in enumerate(string2, start=1):
            cost = 0 if ch1 == ch2 else 1
            cur.append(min(prev[col] + 1,          # insertion
                           cur[col - 1] + 1,       # deletion
                           prev[col - 1] + cost))  # substitution
        prev = cur

    return prev[-1]
+
+
def string_similarity(s1, s2):
    """Returns a similarity measurement based on Levenshtein distance.

    Args:
      s1: the string to compare to s2
      s2: the string to compare to s1

    Returns:
      result: the similarity metric as a float in [0.0, 100.0];
      100.0 means the strings are identical.
    """
    # Fix: the original caught ZeroDivisionError and had an unreachable
    # `else 0` branch (division by zero only happens when BOTH strings are
    # empty, in which case they are identical). Guard explicitly instead.
    longest = max(len(s1), len(s2))
    if longest == 0:
        return 100.0
    lev_ratio = float(levenshtein(s1, s2)) / longest
    return float((1.0 - lev_ratio) * 100)
diff --git a/acts/framework/setup.py b/acts/framework/setup.py
index 422d431..c0c65b8 100755
--- a/acts/framework/setup.py
+++ b/acts/framework/setup.py
@@ -32,11 +32,14 @@
     'pyserial',
     'shellescape>=3.4.1',
     'protobuf',
+    'retry',
     'requests',
     'roman',
     'scapy-python3',
     'pylibftdi',
     'xlsxwriter',
+    # TODO(markdr): b/113719194: Remove this module
+    'colorama'
 ]
 
 if sys.version_info < (3, ):
diff --git a/acts/framework/tests/acts_import_unit_test.py b/acts/framework/tests/acts_import_unit_test.py
index dcb70ef..2442c96 100755
--- a/acts/framework/tests/acts_import_unit_test.py
+++ b/acts/framework/tests/acts_import_unit_test.py
@@ -55,12 +55,20 @@
     'acts/test_utils/wifi/wifi_retail_ap.py',
     'acts/test_utils/bt/bt_power_test_utils.py',
     'acts/test_utils/coex/coex_test_utils.py',
+    'acts/framework/acts/controllers/buds_lib/dev_utils/proto/gen/nanopb_pb2.py',
+    'acts/controllers/buds_lib/data_storage/bigquery/bigquery_logger_utils.py',
+    'acts/controllers/buds_lib/data_storage/bigquery/test_bigquery_utils.py',
+    'acts/controllers/buds_lib/data_storage/bigquery/test_bigquery_logger.py',
+    'acts/controllers/buds_lib/data_storage/bigquery/bigquery_buffer.py',
+    'acts/controllers/buds_lib/data_storage/bigquery/bigquery_logger.py',
+    'acts/controllers/buds_lib/data_storage/bigquery/bigquery_scheduled_automatic_client.py',
     'acts/framework/acts/test_utils/coex/CoexBaseTest.py',
     'acts/framework/acts/test_utils/coex/CoexPerformanceBaseTest.py',
     'acts/framework/acts/test_utils/power/PowerWiFiBaseTest.py',
     'acts/framework/acts/test_utils/power/PowerCoexBaseTest.py',
     'acts/framework/acts/test_utils/power/PowerBaseTest.py',
     'acts/framework/acts/test_utils/power/PowerBTBaseTest.py',
+    'acts/framework/acts/controllers/buds_lib/data_storage/_sponge/sponge_client_lite.py',
 ]
 
 BLACKLIST_DIRECTORIES = [
diff --git a/acts/tests/google/tel/live/TelLiveConnectivityMonitorBaseTest.py b/acts/tests/google/tel/live/TelLiveConnectivityMonitorBaseTest.py
new file mode 100644
index 0000000..57e7519
--- /dev/null
+++ b/acts/tests/google/tel/live/TelLiveConnectivityMonitorBaseTest.py
@@ -0,0 +1,834 @@
+#!/usr/bin/env python3.4
+#
+#   Copyright 2018 - Google
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+"""
+    Connectivity Monitor and Telephony Troubleshooter Tests
+"""
+
+import os
+import re
+import time
+
+from acts import signals
+from acts import utils
+from acts.test_utils.tel.TelephonyBaseTest import TelephonyBaseTest
+from acts.test_utils.tel.tel_defines import CAPABILITY_VOLTE
+from acts.test_utils.tel.tel_defines import CAPABILITY_VT
+from acts.test_utils.tel.tel_defines import CAPABILITY_WFC
+from acts.test_utils.tel.tel_defines import MAX_WAIT_TIME_FOR_STATE_CHANGE
+from acts.test_utils.tel.tel_defines import WFC_MODE_WIFI_PREFERRED
+from acts.test_utils.tel.tel_defines import VT_STATE_BIDIRECTIONAL
+from acts.test_utils.tel.tel_test_utils import bring_up_connectivity_monitor
+from acts.test_utils.tel.tel_test_utils import call_setup_teardown
+from acts.test_utils.tel.tel_test_utils import ensure_wifi_connected
+from acts.test_utils.tel.tel_test_utils import fastboot_wipe
+from acts.test_utils.tel.tel_test_utils import get_device_epoch_time
+from acts.test_utils.tel.tel_test_utils import get_model_name
+from acts.test_utils.tel.tel_test_utils import get_operator_name
+from acts.test_utils.tel.tel_test_utils import hangup_call
+from acts.test_utils.tel.tel_test_utils import last_call_drop_reason
+from acts.test_utils.tel.tel_test_utils import reboot_device
+from acts.test_utils.tel.tel_test_utils import toggle_airplane_mode
+from acts.test_utils.tel.tel_test_utils import toggle_volte
+from acts.test_utils.tel.tel_test_utils import toggle_wfc
+from acts.test_utils.tel.tel_test_utils import wait_for_wfc_enabled
+from acts.test_utils.tel.tel_test_utils import wifi_toggle_state
+from acts.test_utils.tel.tel_test_utils import trigger_modem_crash
+from acts.test_utils.tel.tel_test_utils import trigger_modem_crash_by_modem
+from acts.test_utils.tel.tel_voice_utils import is_phone_in_call_2g
+from acts.test_utils.tel.tel_voice_utils import is_phone_in_call_3g
+from acts.test_utils.tel.tel_voice_utils import is_phone_in_call_csfb
+from acts.test_utils.tel.tel_voice_utils import is_phone_in_call_iwlan
+from acts.test_utils.tel.tel_voice_utils import is_phone_in_call_volte
+from acts.test_utils.tel.tel_voice_utils import phone_setup_voice_2g
+from acts.test_utils.tel.tel_voice_utils import phone_setup_voice_3g
+from acts.test_utils.tel.tel_voice_utils import phone_setup_csfb
+from acts.test_utils.tel.tel_voice_utils import phone_setup_iwlan
+from acts.test_utils.tel.tel_voice_utils import phone_setup_volte
+from acts.test_utils.tel.tel_video_utils import video_call_setup_teardown
+from acts.test_utils.tel.tel_video_utils import phone_setup_video
+from acts.test_utils.tel.tel_video_utils import \
+    is_phone_in_call_video_bidirectional
+
# QMI call-end reason codes reported by the modem, mapped to their
# human-readable descriptions (used with the call-end-reason override prop).
CALL_DROP_CODE_MAPPING = {
    373: "Radio Internal Error",
    175: "Invalid Transaction Identifier V02",
    159: "Temporary Failure",
    135: "Rejected by Network V02",
    118: "SS Not Available",
    115: "Call Barred V02",
    42: "Access Block V02",
    41: "Incompatible V02"
}

# Thresholds exercised by the Connectivity Monitor trouble-detection tests.
CONSECUTIVE_CALL_FAILS = 5
CALL_TROUBLE_THRESHOLD = 25
# Trouble categories reported by the Telephony Troubleshooter, keyed by id.
TROUBLES = {
    1: "WIFI_CALL_DROPS_IN_BAD_WIFI_SIGNAL",
    2: "WIFI_CALL_DROPS_IN_GOOD_WIFI_SIGNAL_ON_SPECIFIC_WIFI_NETWORK",
    3: "WIFI_CALL_DROPS_WITH_SPECIFIC_REASON_IN_GOOD_WIFI_SIGNAL",
    4: "WIFI_CALL_DROPS_WITH_RANDOM_FAILURES_IN_GOOD_WIFI_SIGNAL",
    5: "VOLTE_CALL_DROPS_IN_BAD_LTE_SIGNAL_AREAS",
    6: "VOLTE_CALL_DROPS_IN_GOOD_LTE_SIGNAL_AREAS",
    7: "CS_CALL_DROPS_IMS_DISABLED",
    8: "CS_CALL_DROPS_WFC_DISABLED",
    9: "CS_CALL_DROPS_IMS_REGISTRATION_FAILURES",
    10: "CS_CALL_DROPS_DURING_SRVCC",
    11: "CS_CALL_DROPS_IN_BAD_RF_CONDITIONS",
    12: "CS_CALL_DROPS_IN_GOOD_RF_CONDITIONS_WITH_SPECIFIC_REASON",
    13: "UNABLE_TO_TRIAGE"
}

# Recovery actions the troubleshooter can recommend, keyed by id.
ACTIONS = {
    1: "CHECK_BLUETOOTH",
    2: "CHECK_HEADSET",
    3: "SWITCH_FROM_WIFI_PREFERRED_TO_CELLULAR_PREFERRED",
    4: "SWITCH_FROM_CELLULAR_PREFERRED_TO_WIFI_PREFERRED",
    5: "ENABLE_ADVANCED_4G_CALLING",
    6: "DISABLE_ADVANCED_4G_CALLING",
    7: "TOGGLE_AIRPLANE_MODE_TWICE",
    8: "REBOOT_THE_PHONE",
    9: "ENABLE_WIFI_CALLING",
    10: "DISABLE_WIFI_CALLING",
    11: "DISABLE_AIRPLANE_MODE",
    12: "NONE"
}

# Call drop reasons these tests do not count against the checks.
IGNORED_CALL_DROP_REASONS = ["Radio Link Lost", "Media Timeout"]

# On-device directory holding Connectivity Monitor's databases; pulled by
# on_fail() for debugging.
CALL_DATA_LOGS = (
    "/data/data/com.google.android.connectivitymonitor/databases")

# Drop-trigger names that are not expected to produce countable call drops.
IGNORED_CALL_DROP_TRIGGERS = ["toggle_apm", "toggle_wifi"]
+
+
+class TelLiveConnectivityMonitorBaseTest(TelephonyBaseTest):
+    def __init__(self, controllers):
+        TelephonyBaseTest.__init__(self, controllers)
+        self.user_params["enable_connectivity_metrics"] = False
+        self.user_params["telephony_auto_rerun"] = 0
+
    def setup_class(self):
        """Cache DUT/reference device handles and capabilities; configure
        class-level telephony test options."""
        TelephonyBaseTest.setup_class(self)
        self.dut = self.android_devices[0]
        self.ad_reference = self.android_devices[1]
        self.dut_model = get_model_name(self.dut)
        self.dut_operator = get_operator_name(self.log, self.dut)
        self.dut_capabilities = self.dut.telephony.get("capabilities", [])
        self.dut_wfc_modes = self.dut.telephony.get("wfc_modes", [])
        self.reference_capabilities = self.ad_reference.telephony.get(
            "capabilities", [])
        self.dut.log.info("DUT capabilities: %s", self.dut_capabilities)
        # Do not factory-reset devices between cases.
        self.skip_reset_between_cases = False
        self.user_params["telephony_auto_rerun"] = 0
        self.number_of_devices = 1
        # 373 = "Radio Internal Error" (see CALL_DROP_CODE_MAPPING).
        self.call_drop_override_code = self.user_params.get(
            "call_drop_override_code", 373)
+
    def setup_test(self):
        """Per-test setup: start Connectivity Monitor and make SELinux
        permissive if it is not already."""
        TelephonyBaseTest.setup_test(self)
        bring_up_connectivity_monitor(self.dut)
        ## Work around for WFC not working issue on 2018 devices
        if "Permissive" not in self.dut.adb.shell("su root getenforce"):
            self.dut.adb.shell("su root setenforce 0")
+
    def on_fail(self, test_name, begin_time):
        """On test failure, pull Connectivity Monitor's databases from the
        DUT and take a bug report.

        Args:
            test_name: Name of the failed test; used for the log directory.
            begin_time: Epoch time the test began, passed to the bug report.
        """
        self.dut.log.info("Pulling %s", CALL_DATA_LOGS)
        log_path = os.path.join(self.dut.log_path, test_name,
                                "ConnectivityMonitorLogs_%s" % self.dut.serial)
        utils.create_dir(log_path)
        self.dut.pull_files([CALL_DATA_LOGS], log_path)

        self._take_bug_report(test_name, begin_time)
+
    def teardown_test(self):
        """Clear any call-end-reason override, then run base teardown."""
        self.set_drop_reason_override(override_code=None)
        TelephonyBaseTest.teardown_test(self)
+
+    def connect_to_wifi(self):
+        if not ensure_wifi_connected(self.log, self.dut,
+                                     self.wifi_network_ssid,
+                                     self.wifi_network_pass):
+            self.dut.log.error("Fail to connected to WiFi")
+            return False
+        else:
+            self.dut.log.info("Connected to WiFi")
+            return True
+
    def is_wfc_enabled(self):
        """Wait for and return whether Wifi Calling becomes enabled on the
        DUT."""
        return wait_for_wfc_enabled(self.log, self.dut)
+
    def enable_volte(self):
        """Turn VoLTE on; skip the test if the DUT lacks VoLTE support."""
        if CAPABILITY_VOLTE not in self.dut_capabilities:
            raise signals.TestSkip("VoLTE is not supported, abort test.")
        toggle_volte(self.log, self.dut, True)

    def enable_wfc(self):
        """Turn Wifi Calling on; skip the test if the DUT lacks WFC support."""
        if CAPABILITY_WFC not in self.dut_capabilities:
            raise signals.TestSkip("WFC is not supported, abort test.")
        toggle_wfc(self.log, self.dut, True)

    def disable_volte(self):
        """Turn VoLTE off; skip the test if the DUT lacks VoLTE support."""
        if CAPABILITY_VOLTE not in self.dut_capabilities:
            raise signals.TestSkip("VoLTE is not supported, abort test.")
        toggle_volte(self.log, self.dut, False)

    def disable_wfc(self):
        """Turn Wifi Calling off; skip the test if the DUT lacks WFC
        support."""
        if CAPABILITY_WFC not in self.dut_capabilities:
            raise signals.TestSkip("WFC is not supported, abort test.")
        toggle_wfc(self.log, self.dut, False)
+
+    def setup_wfc_non_apm(self):
+        if CAPABILITY_WFC not in self.dut_capabilities and (
+                WFC_MODE_WIFI_PREFERRED not in self.dut_wfc_modes):
+            raise signals.TestSkip(
+                "WFC in non-APM is not supported, abort test.")
+        if not phone_setup_iwlan(
+                self.log, self.dut, False, WFC_MODE_WIFI_PREFERRED,
+                self.wifi_network_ssid, self.wifi_network_pass):
+            self.dut.log.error("Failed to setup WFC.")
+            raise signals.TestFailure("Failed to setup WFC in non-APM")
+        self.dut.log.info("Phone is in WFC enabled state.")
+        return True
+
    def setup_wfc_apm(self):
        """Put the DUT in WFC mode with airplane mode on.

        Raises:
            signals.TestSkip: WFC is not supported on the DUT.
            signals.TestFailure: The WFC setup did not complete.

        Returns:
            True on success.
        """
        if CAPABILITY_WFC not in self.dut_capabilities:
            raise signals.TestSkip("WFC is not supported, abort test.")
        # Use the first supported WFC mode from the DUT's configuration.
        if not phone_setup_iwlan(self.log, self.dut, True,
                                 self.dut_wfc_modes[0], self.wifi_network_ssid,
                                 self.wifi_network_pass):
            self.dut.log.error("Failed to setup WFC.")
            raise signals.TestFailure("Failed to setup WFC in APM")
        self.dut.log.info("Phone is in WFC enabled state.")
        return True
+
    def setup_volte(self):
        """Enable VoLTE on the DUT.

        Raises:
            signals.TestSkip: VoLTE is not supported on the DUT.
            signals.TestFailure: VoLTE could not be enabled.

        Returns:
            True on success.
        """
        if CAPABILITY_VOLTE not in self.dut_capabilities:
            raise signals.TestSkip("VoLTE is not supported, abort test.")
        if not phone_setup_volte(self.log, self.dut):
            self.dut.log.error("Phone failed to enable VoLTE.")
            raise signals.TestFailure("Failed to enable VoLTE")
        self.dut.log.info("Phone VOLTE is enabled successfully.")
        return True
+
    def setup_csfb(self):
        """Put the DUT in CSFB (circuit-switched fallback) voice mode.

        Raises:
            signals.TestFailure: CSFB could not be set up.

        Returns:
            True on success.
        """
        if not phone_setup_csfb(self.log, self.dut):
            self.dut.log.error("Phone failed to setup CSFB.")
            raise signals.TestFailure("Failed to setup CSFB")
        self.dut.log.info("Phone CSFB is enabled successfully.")
        return True
+
+    def setup_3g(self):
+        if not phone_setup_voice_3g(self.log, self.dut):
+            self.dut.log.error("Phone failed to setup 3G.")
+            raise signals.TestFailure("Faile to setup 3G")
+        self.dut.log.info("Phone RAT 3G is enabled successfully.")
+        return True
+
    def setup_2g(self):
        """Camp the DUT on 2G for voice (only supported for T-Mobile / EE).

        Raises:
            signals.TestSkip: The DUT's operator does not support 2G here.
            signals.TestFailure: The phone could not be set up on 2G.

        Returns:
            True on success.
        """
        if self.dut_operator not in ("tmo", "uk_ee"):
            raise signals.TestSkip("2G is not supported, abort test.")
        if not phone_setup_voice_2g(self.log, self.dut):
            self.dut.log.error("Phone failed to setup 2G.")
            raise signals.TestFailure("Failed to setup 2G")
        self.dut.log.info("RAT 2G is enabled successfully.")
        return True
+
+    def setup_vt(self):
+        if CAPABILITY_VT not in self.dut_capabilities or (
+                CAPABILITY_VT not in self.reference_capabilities):
+            raise signals.TestSkip("VT is not supported, abort test.")
+        for ad in (self.dut, self.ad_reference):
+            if not phone_setup_video(self.log, ad):
+                ad.log.error("Failed to setup VT.")
+                return False
+            return True
+
    def set_drop_reason_override(self, override_code=None):
        """Set or clear the modem property forcing a call-end reason.

        Args:
            override_code: Numeric call-end reason code to force (see
                CALL_DROP_CODE_MAPPING); None (or 0) clears the override.
        """
        if not override_code:
            # Clear only if the property currently holds a value.
            if self.dut.adb.shell("getprop vendor.radio.call_end_reason"):
                self.dut.adb.shell("setprop vendor.radio.call_end_reason ''")
        else:
            # Write only when the property differs from the wanted code.
            if self.dut.adb.shell("getprop vendor.radio.call_end_reason"
                                  ) != str(override_code):
                cmd = "setprop vendor.radio.call_end_reason %s" \
                      % override_code
                self.dut.log.info("====== %s ======", cmd)
                self.dut.adb.shell(cmd)
+
    def modem_crash(self):
        """Trigger a modem SSR (subsystem restart) on the DUT.

        Falls back to the legacy crash trigger when the mdstest apk is
        missing, on Android N/O releases, or on 2015/2016 Nexus models;
        otherwise crashes the modem via the modem itself.
        """
        # Modem SSR
        # Disable post-test crash checking: this crash is intentional.
        self.user_params["check_crash"] = False
        self.dut.log.info("Triggering ModemSSR")
        try:
            self.dut.droid.logI("======== Trigger modem crash ========")
        except Exception:
            # Best-effort log marker only; safe to ignore failures.
            pass
        if (not self.dut.is_apk_installed("com.google.mdstest")
            ) or self.dut.adb.getprop("ro.build.version.release")[0] in (
                "8", "O", "7", "N") or self.dut.model in ("angler", "bullhead",
                                                          "sailfish",
                                                          "marlin"):
            trigger_modem_crash(self.dut)
        else:
            trigger_modem_crash_by_modem(self.dut)
+
    def call_drop_by_modem_crash(self,
                                 call_verification_function=None,
                                 vt=False):
        """Establish a call, then crash the modem to force a call drop.

        Args:
            call_verification_function: In-call verification function for a
                voice call, passed to call_setup_teardown.
            vt: If True, set up a bidirectional video call instead of voice.

        Returns:
            False if the call could not be established or if the call
            survives the modem crash.

        NOTE(review): there is no explicit `return True`; the success path
        falls through returning None (falsy). Callers relying on a truthy
        success value would misread success as failure — confirm intended.
        """
        if vt:
            if not video_call_setup_teardown(
                    self.log,
                    self.dut,
                    self.ad_reference,
                    None,
                    video_state=VT_STATE_BIDIRECTIONAL,
                    verify_caller_func=is_phone_in_call_video_bidirectional,
                    verify_callee_func=is_phone_in_call_video_bidirectional):
                self.dut.log.error("VT Call Failed.")
                return False
        else:
            if not call_setup_teardown(
                    self.log,
                    self.dut,
                    self.ad_reference,
                    ad_hangup=None,
                    verify_caller_func=call_verification_function,
                    wait_time_in_call=10):
                self.log.error("Call setup failed")
                return False

        # Modem SSR
        self.modem_crash()

        try:
            if self.dut.droid.telecomIsInCall():
                self.dut.log.info("Still in call after trigger modem crash")
                return False
            else:
                # Surface the modem's last call failure cause from logcat.
                reasons = self.dut.search_logcat(
                    "qcril_qmi_voice_map_qmi_to_ril_last_call_failure_cause")
                if reasons:
                    self.dut.log.info(reasons[-1]["log_message"])
        except Exception as e:
            self.dut.log.error(e)
+
    def toggle_apm(self):
        """Toggle airplane mode on the DUT (new_state=None flips state)."""
        toggle_airplane_mode(self.log, self.dut, new_state=None)

    def toggle_wifi(self):
        """Toggle wifi on the DUT (None flips the current state)."""
        wifi_toggle_state(self.log, self.dut, None)

    def drop_reason_override(self):
        """Hang up the current call from the reference device.

        NOTE(review): presumably used as the drop trigger when a call-end
        reason override is in effect — confirm against callers.
        """
        hangup_call(self.log, self.ad_reference)
+
    def clearn_up_bugreport_database(self):
        """Delete Connectivity Monitor's bug-report shared-prefs file.

        NOTE(review): method name typo ("clearn_up") kept for caller
        compatibility.
        """
        self.dut.adb.shell(
            "rm /data/data/com.google.android.connectivitymonitor/"
            "shared_prefs/ConnectivityMonitor_BugReport.xml")

    def clearn_up_troubleshooter_database(self):
        """Delete Connectivity Monitor's troubleshooter-result shared-prefs
        file.

        NOTE(review): same "clearn_up" naming typo, kept for compatibility.
        """
        self.dut.adb.shell(
            "rm /data/data/com.google.android.connectivitymonitor/"
            "shared_prefs/ConnectivityMonitor_TroubleshooterResult.xml")
+
+    def parsing_bugreport_database(self):
+        output = self.dut.adb.shell(
+            "cat /data/data/com.google.android.connectivitymonitor/"
+            "shared_prefs/ConnectivityMonitor_BugReport.xml")
+        bugreport_database = re.findall(r">Call Drop:\s+(.*)<", output)
+        self.dut.log.info("bugreport_database = %s", bugreport_database)
+        return bugreport_database
+
    def parsing_troubleshooter_database(self):
        """Parse the TroubleshooterResult shared prefs into a dict.

        Returns:
            Dict mapping preference names to values; entries whose name
            contains "count" or "num_calls" are converted to int.
        """
        output = self.dut.adb.shell(
            "cat /data/data/com.google.android.connectivitymonitor/"
            "shared_prefs/ConnectivityMonitor_TroubleshooterResult.xml")
        results = re.findall(r"name=\"(\S+)\">(\S+)<", output)
        troubleshooter_database = {}
        for result in results:
            if "count" in result[0] or "num_calls" in result[0]:
                troubleshooter_database[result[0]] = int(result[1])
            else:
                troubleshooter_database[result[0]] = result[1]
        self.dut.log.info("TroubleshooterResult=%s",
                          sorted(troubleshooter_database.items()))
        return troubleshooter_database
+
    def parsing_call_summary(self):
        """Dump ConnectivityMonitorService and parse per-type call counters.

        Returns:
            Dict with, for each call type <T> in the dumpsys output:
              <T>: total number of calls (int),
              <T>_dropped: number of dropped calls (int),
              <T>_dropped_percentage: drop percentage (0 when "NaN"),
              <T>_failure_reason: predominant failure reason, if reported.
        """
        call_summary = self.dut.adb.shell(
            "dumpsys activity service com.google.android.connectivitymonitor/"
            ".ConnectivityMonitorService")
        self.dut.log.info(call_summary)
        call_summary_info = {}
        results = re.findall(
            r"(\S+): (\d+) out of (\d+) calls dropped, percentage=(\S+)",
            call_summary)
        for result in results:
            call_summary_info[result[0]] = int(result[2])
            call_summary_info["%s_dropped" % result[0]] = int(result[1])
            if result[3] == "NaN":
                call_summary_info["%s_dropped_percentage" % result[0]] = 0
            else:
                call_summary_info["%s_dropped_percentage" % result[0]] = float(
                    result[3])
        results = re.findall(r"(\S+): predominant failure reason=(.+)",
                             call_summary)
        for result in results:
            call_summary_info["%s_failure_reason" % result[0]] = result[1]
        self.dut.log.info("call summary dumpsys = %s",
                          sorted(call_summary_info.items()))
        return call_summary_info
+
    def parsing_call_statistics(self):
        """Query the troubleshooter provider's call_statistics table.

        Returns:
            Dict of KEY/VALUE pairs; values for keys containing "count" or
            "num_calls" are converted to int, with "NULL" mapped to 0.
        """
        call_statistics_info = {}
        call_statistics = self.dut.adb.shell(
            "content query --uri content://com.google.android."
            "connectivitymonitor.troubleshooterprovider/call_statistics")
        self.dut.log.info("troubleshooterprovider call_statistics:\n%s",
                          call_statistics)
        results = re.findall(r"KEY=(\S+), VALUE=(\S+)", call_statistics)
        for result in results:
            if ("count" in result[0] or "num_calls" in result[0]):
                if result[1] == "NULL":
                    call_statistics_info[result[0]] = 0
                else:
                    call_statistics_info[result[0]] = int(result[1])
            else:
                call_statistics_info[result[0]] = result[1]
        self.dut.log.info("troubleshooterprovider call_statistics: %s",
                          sorted(call_statistics_info.items()))
        return call_statistics_info
+
+    def parsing_diagnostics(self):
+        diagnostics_info = {}
+        diagnostics = self.dut.adb.shell(
+            "content query --uri content://com.google.android."
+            "connectivitymonitor.troubleshooterprovider/diagnostics")
+        self.dut.log.info("troubleshooterprovider diagnostics:\n%s",
+                          diagnostics)
+        results = re.findall(r"KEY=(\S+), VALUE=(\S+)", diagnostics)
+        for result in results:
+            diagnostics_info[result[0]] = result[1]
+        self.dut.log.info("troubleshooterprovider diagnostics: %s",
+                          sorted(diagnostics_info.items()))
+        return diagnostics_info
+
+    def call_setup_and_connectivity_monitor_checking(self,
+                                                     setup=None,
+                                                     handover=None,
+                                                     triggers=[],
+                                                     expected_drop_reason="",
+                                                     expected_trouble=None,
+                                                     expected_action=None):
+
+        call_verification_function = None
+        begin_time = get_device_epoch_time(self.dut)
+        call_data_summary_before = self.parsing_call_summary()
+        call_statistics_before = self.parsing_call_statistics()
+        self.parsing_diagnostics()
+        self.parsing_troubleshooter_database()
+        bugreport_database_before = self.parsing_bugreport_database()
+
+        if expected_drop_reason:
+            expected_drop_reasons = set(expected_drop_reason.split("|"))
+        else:
+            expected_drop_reasons = set()
+        checking_counters = ["Calls"]
+        checking_reasons = []
+        result = True
+        if setup in ("wfc_apm", "wfc_non_apm"):
+            call_verification_function = is_phone_in_call_iwlan
+        elif setup == "volte":
+            call_verification_function = is_phone_in_call_volte
+        elif setup == "csfb":
+            call_verification_function = is_phone_in_call_csfb
+        elif setup == "3g":
+            call_verification_function = is_phone_in_call_3g
+        elif setup == "2g":
+            call_verification_function = is_phone_in_call_2g
+        technology = handover or setup
+        if technology in ("wfc_apm", "wfc_non_apm"):
+            if triggers and triggers[0] not in IGNORED_CALL_DROP_TRIGGERS:
+                checking_counters.extend(
+                    ["Calls_dropped", "VOWIFI", "VOWIFI_dropped"])
+                checking_reasons.append("VOWIFI_failure_reason")
+            elif call_data_summary_before.get("Calls_dropped", 0):
+                checking_counters.append("VOWIFI")
+        elif technology == "volte":
+            if triggers and triggers[0] not in IGNORED_CALL_DROP_TRIGGERS:
+                checking_counters.extend(
+                    ["Calls_dropped", "VOLTE", "VOLTE_dropped"])
+                checking_reasons.append("VOLTE_failure_reason")
+            elif call_data_summary_before.get("Calls_dropped", 0):
+                checking_counters.append("VOLTE")
+        elif technology in ("csfb", "3g", "2g"):
+            if triggers and triggers[0] not in IGNORED_CALL_DROP_TRIGGERS:
+                checking_counters.extend(["Calls_dropped", "CS", "CS_dropped"])
+                checking_reasons.append("CS_failure_reason")
+            elif call_data_summary_before.get("Calls_dropped", 0):
+                checking_counters.append("CS")
+
+        if setup == "vt":
+            if not video_call_setup_teardown(
+                    self.log,
+                    self.dut,
+                    self.ad_reference,
+                    None,
+                    video_state=VT_STATE_BIDIRECTIONAL,
+                    verify_caller_func=is_phone_in_call_video_bidirectional,
+                    verify_callee_func=is_phone_in_call_video_bidirectional):
+                raise signals.TestFailure("VT Call Failed.")
+        else:
+            if not call_setup_teardown(
+                    self.log,
+                    self.dut,
+                    self.ad_reference,
+                    ad_hangup=None,
+                    verify_caller_func=call_verification_function,
+                    wait_time_in_call=10):
+                raise signals.TestFailure("Call Setup Failed.")
+
+        for trigger in triggers:
+            if self.dut.droid.telecomIsInCall():
+                self.dut.log.info("Telecom is in call")
+                self.dut.log.info(
+                    "Voice in RAT %s",
+                    self.dut.droid.telephonyGetCurrentVoiceNetworkType())
+            else:
+                self.dut.log.info("Not in call")
+            # Trigger in-call event
+            if trigger and getattr(self, trigger, None):
+                trigger_func = getattr(self, trigger)
+                trigger_func()
+                time.sleep(MAX_WAIT_TIME_FOR_STATE_CHANGE)
+
+        if self.dut.droid.telecomIsInCall():
+            self.dut.log.info("Telecom is in call")
+            self.dut.log.info(
+                "Voice in RAT %s",
+                self.dut.droid.telephonyGetCurrentVoiceNetworkType())
+        else:
+            self.dut.log.info("Not in call")
+
+        if self.dut.droid.telecomIsInCall():
+            self.dut.log.info("Telecom is in call")
+            self.dut.log.info(
+                "Voice in RAT %s",
+                self.dut.droid.telephonyGetCurrentVoiceNetworkType())
+        else:
+            self.dut.log.info("Not in call")
+
+        drop_reason = last_call_drop_reason(self.dut, begin_time)
+        drop_reason = drop_reason.title()
+        if drop_reason:
+            expected_drop_reasons.add(drop_reason)
+        for ad in (self.ad_reference, self.dut):
+            try:
+                if ad.droid.telecomIsInCall():
+                    if triggers:
+                        ad.log.info("Still in call after triggers %s",
+                                    triggers)
+                        result = False
+                    hangup_call(self.log, ad)
+                    time.sleep(MAX_WAIT_TIME_FOR_STATE_CHANGE)
+            except Exception as e:
+                ad.log.error(e)
+
+        call_data_summary_after = self.parsing_call_summary()
+        call_statistics_after = self.parsing_call_statistics()
+        diagnostics_after = self.parsing_diagnostics()
+        ts_database_after = self.parsing_troubleshooter_database()
+
+        for counter in checking_counters:
+            if call_data_summary_after.get(
+                    counter,
+                    0) != call_data_summary_before.get(counter, 0) + 1:
+                self.dut.log.error("Counter %s did not increase", counter)
+                result = False
+            else:
+                self.dut.log.info("Counter %s increased", counter)
+            if counter == "Calls":
+                if call_statistics_after.get("num_calls",
+                                             0) - call_statistics_before.get(
+                                                 "num_calls", 0) < 1:
+                    self.dut.log.warning(
+                        "call_statistics num_calls didn't increase")
+                    # result = False
+                else:
+                    self.dut.log.info("call_statistics num_calls increased")
+            if "_dropped" in counter and counter != "Calls_dropped":
+                desc = counter.split("_")[0]
+                if desc == "VOWIFI":
+                    stat_key = "recent_wfc_fail_count"
+                else:
+                    stat_key = "recent_%s_fail_count" % desc.lower()
+                before = call_statistics_after.get(stat_key, 0)
+                after = call_statistics_after.get(stat_key, 0)
+                most_failure_call_type = call_statistics_after.get(
+                    "call_type_with_most_failures")
+                diagnosis = diagnostics_after.get("diagnosis")
+                actions = diagnostics_after.get("actions")
+                if after - before < 1:
+                    self.dut.log.warning("call_statistics %s didn't increase, "
+                                         "before %s, after %s" %
+                                         (stat_key, before, after))
+                    # result = False
+                else:
+                    self.dut.log.info("call_statistics %s increased", stat_key)
+                if most_failure_call_type != desc:
+                    self.dut.log.warning(
+                        "call_statistics call_type_with_most_failures "
+                        "is %s, not %s", most_failure_call_type, desc)
+                else:
+                    self.dut.log.info(
+                        "call_statistics call_type_with_most_failures is %s",
+                        most_failure_call_type)
+                dropped = call_data_summary_after.get("%s_dropped" % desc, 0)
+                drop_percentage = call_data_summary_after.get(
+                    "%s_dropped_percentage" % desc, 0)
+                self.dut.log.info("%s_dropped = %s, percentage = %s", desc,
+                                  dropped, drop_percentage)
+                if expected_trouble and expected_trouble != diagnosis:
+                    self.dut.log.warning("diagnoisis = %s, expecting %s",
+                                         diagnosis, expected_trouble)
+                if expected_action and expected_action != actions:
+                    self.dut.log.error("actions = %s, expecting %s", actions,
+                                       expected_action)
+                    result = False
+                if drop_percentage > CALL_TROUBLE_THRESHOLD and (
+                        dropped > CONSECUTIVE_CALL_FAILS):
+                    if diagnosis == "UNABLE_TO_TRIAGE":
+                        self.dut.log.error(
+                            "troubleshooter diagnosis is %s with %s dropped "
+                            "and %s drop_percentage", diagnosis, dropped,
+                            drop_percentage)
+                        result = False
+                    if actions == "NONE":
+                        self.dut.log.error(
+                            "troubleshooter failed to provide suggestion, "
+                            "actions = %s", actions)
+                        result = False
+        if expected_drop_reasons:
+            expected_drop_reason = "|".join(expected_drop_reasons)
+        for reason_key in checking_reasons:
+            if call_data_summary_after.get(reason_key, None):
+                drop_reason = call_data_summary_after[reason_key]
+                if expected_drop_reason and drop_reason not in expected_drop_reason:
+                    self.dut.log.error("%s is: %s, expecting %s", reason_key,
+                                       drop_reason, expected_drop_reason)
+                    result = False
+                else:
+                    self.dut.log.info("%s is: %s as expected", reason_key,
+                                      drop_reason)
+            else:
+                self.dut.log.error("%s is not provided in summary report",
+                                   reason_key)
+                result = False
+
+        if not triggers or triggers[0] in IGNORED_CALL_DROP_TRIGGERS:
+            return result
+        if drop_reason in bugreport_database_before:
+            self.dut.log.info("%s is in bugreport database %s before call",
+                              drop_reason, bugreport_database_before)
+            return result
+        else:
+            self.dut.log.info("%s is not in bugreport database %s before call",
+                              drop_reason, bugreport_database_before)
+        if drop_reason in IGNORED_CALL_DROP_REASONS:
+            self.dut.log.info(
+                "Call drop with reason %s will skip bugreport notification",
+                drop_reason)
+            return result
+        else:
+            self.dut.log.info(
+                "Call drop %s should generate bugreport notification",
+                drop_reason)
+        # Parse logcat for UI notification only for the first failure
+        if self.dut.search_logcat("Bugreport notification title Call Drop:",
+                                  begin_time):
+            self.dut.log.info(
+                "Bugreport notification title Call Drop is seen in logcat")
+            return result
+        else:
+            self.dut.log.error(
+                "Bugreport notification title Call Drop is not seen in logcat")
+            return False
+
+    def call_drop_test(self,
+                       setup=None,
+                       handover=None,
+                       count=CONSECUTIVE_CALL_FAILS,
+                       triggers=[],
+                       expected_drop_reason=None,
+                       expected_trouble=None,
+                       expected_action=None):
+        if not triggers:
+            if self.dut.model in ("marlin", "sailfish", "walleye", "taimen"):
+                triggers = ["modem_crash"]
+                expected_drop_reason = "Error Unspecified"
+            else:
+                triggers = ["drop_reason_override"]
+        if "drop_reason_override" in triggers:
+            self.set_drop_reason_override(
+                override_code=self.call_drop_override_code)
+            expected_drop_reason = CALL_DROP_CODE_MAPPING[int(
+                self.call_drop_override_code)]
+        for iter in range(count):
+            self.dut.log.info("===== %s_iter_%s =====", self.test_name,
+                              iter + 1)
+            if iter < count - 1:
+                action = None
+                trouble = None
+            else:
+                action = expected_action
+                trouble = expected_trouble
+            if not self.call_setup_and_connectivity_monitor_checking(
+                    setup=setup,
+                    handover=handover,
+                    triggers=triggers,
+                    expected_drop_reason=expected_drop_reason,
+                    expected_trouble=trouble,
+                    expected_action=action):
+                return False
+        return True
+
+    def call_drop_triggered_suggestion_test(self,
+                                            setup=None,
+                                            handover=None,
+                                            triggers=[],
+                                            expected_drop_reason=None,
+                                            expected_trouble=None,
+                                            expected_action=None):
+        call_summary = self.parsing_call_summary()
+        diagnostics = self.parsing_diagnostics()
+        diagnosis = diagnostics.get("diagnosis")
+        actions = diagnostics.get("actions")
+        self.dut.log.info("Expected trouble = %s, action = %s",
+                          expected_trouble, expected_action)
+        if expected_trouble and diagnosis == expected_trouble and not handover:
+            self.dut.log.info("Diagnosis is the expected %s", trouble)
+            if expected_action and expected_action != actions:
+                self.dut.log.error("Action is %s, expecting %s", actions,
+                                   expected_action)
+                result = False
+            if setup in ("wfc_apm", "wfc_non_apm"):
+                desc = "VOWIFI"
+            elif setup == "volte":
+                desc = "VOLTE"
+            elif setup in ("csfb", "3g", "2g"):
+                desc = "CS"
+            drops = call_summary.get("%s_dropped" % desc, 0)
+            drop_percentage = call_summary.get("%s_dropped_percentage" % desc,
+                                               0)
+            if drops < CONSECUTIVE_CALL_FAILS or drop_percentage < 25:
+                self.dut.log.error(
+                    "Should NOT get %s for %s %s_dropped and %s %s_dropped_percentage",
+                    trouble, drops, desc, drop_percentage, desc)
+                return False
+            else:
+                return True
+        else:
+            self.dut.log.info("Generate %s consecutive call drops",
+                              CONSECUTIVE_CALL_FAILS)
+            return self.call_drop_test(
+                setup=setup,
+                handover=handover,
+                count=CONSECUTIVE_CALL_FAILS,
+                triggers=triggers,
+                expected_drop_reason=expected_drop_reason,
+                expected_trouble=expected_trouble,
+                expected_action=expected_action)
+
+    def healthy_call_test(self,
+                          setup=None,
+                          handover=None,
+                          count=1,
+                          triggers=[],
+                          expected_trouble=None,
+                          expected_action=None):
+        if self.dut.model not in ("marlin", "sailfish", "walleye", "taimen"):
+            self.set_drop_reason_override(override_code=25)
+        result = True
+        for iter in range(count):
+            if not self.call_setup_and_connectivity_monitor_checking(
+                    setup=setup,
+                    handover=handover,
+                    triggers=triggers,
+                    expected_trouble=expected_trouble,
+                    expected_action=expected_action):
+                return False
+        return True
+
+    def forced_call_drop_test(self,
+                              setup=None,
+                              handover=None,
+                              triggers=None,
+                              expected_drop_reason=None):
+        expected_trouble = None
+        expected_action = None
+        technology = handover or setup
+        if setup:
+            setup_func = getattr(self, "setup_%s" % setup)
+            if not setup_func(): return False
+            if technology == "volte":
+                expected_trouble = TROUBLES[6],
+                expected_action = ACTIONS[6]
+            elif technology == "csfb":
+                if CAPABILITY_VOLTE in self.dut_capabilities:
+                    expected_action = ACTIONS[5]
+                else:
+                    expected_action = ACTIONS[7]
+                expected_trouble = TROUBLES[7]
+            elif technology == "3g":
+                if CAPABILITY_VOLTE in self.dut_capabilities:
+                    expected_action = ACTIONS[5]
+                else:
+                    expected_action = ACTIONS[7]
+                expected_trouble = TROUBLES[7]
+            elif technology == "2g":
+                if CAPABILITY_VOLTE in self.dut_capabilities:
+                    expected_action = ACTIONS[5]
+                else:
+                    expected_action = ACTIONS[7]
+                expected_trouble = TROUBLES[7]
+            elif technology == "wfc_apm":
+                expected_trouble = TROUBLES[3]
+                expected_action = ACTIONS[11]
+            elif technology == "wfc_non_apm":
+                expected_trouble = TROUBLES[3]
+                expected_action = ACTIONS[3]
+
+        return self.call_drop_triggered_suggestion_test(
+            setup=setup,
+            handover=handover,
+            triggers=triggers,
+            expected_drop_reason=expected_drop_reason,
+            expected_trouble=expected_trouble,
+            expected_action=expected_action)
+
+    def call_drop_test_after_wipe(self, setup=None):
+        """Fastboot-wipe the DUT, then rerun the forced call-drop test.
+
+        Verifies connectivity monitor behavior after a factory wipe.
+
+        Returns:
+            True on success, False otherwise.
+        """
+        if setup:
+            setup_func = getattr(self, "setup_%s" % setup)
+            if not setup_func(): return False
+        fastboot_wipe(self.dut)
+        bring_up_connectivity_monitor(self.dut)
+        return self.forced_call_drop_test(setup=setup)
+
+    def call_drop_test_after_reboot(self, setup=None):
+        """Verify call-drop bookkeeping across a device reboot.
+
+        Forces drops, runs one healthy call, reboots, then forces drops
+        again; only the final run's result is returned.
+        """
+        self.forced_call_drop_test(setup=setup)
+        self.healthy_call_test(setup=setup, count=1)
+        reboot_device(self.dut)
+        return self.forced_call_drop_test(setup=setup)
diff --git a/acts/tests/google/tel/live/TelLiveConnectivityMonitorMobilityTest.py b/acts/tests/google/tel/live/TelLiveConnectivityMonitorMobilityTest.py
new file mode 100755
index 0000000..699718b
--- /dev/null
+++ b/acts/tests/google/tel/live/TelLiveConnectivityMonitorMobilityTest.py
@@ -0,0 +1,476 @@
+#!/usr/bin/env python3.4
+#
+#   Copyright 2016 - Google
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+"""
+    Test Script for epdg RF shield box related tests.
+"""
+
+import time
+from acts.test_decorators import test_tracker_info
+from acts.test_utils.tel.TelephonyBaseTest import TelephonyBaseTest
+from acts.test_utils.tel.tel_atten_utils import set_rssi
+from acts.test_utils.tel.tel_defines import CELL_WEAK_RSSI_VALUE
+from acts.test_utils.tel.tel_defines import INVALID_WIFI_RSSI
+from acts.test_utils.tel.tel_defines import MAX_RSSI_RESERVED_VALUE
+from acts.test_utils.tel.tel_defines import MIN_RSSI_RESERVED_VALUE
+from acts.test_utils.tel.tel_defines import WAIT_TIME_WIFI_RSSI_CALIBRATION_SCREEN_ON
+from acts.test_utils.tel.tel_defines import WAIT_TIME_WIFI_RSSI_CALIBRATION_WIFI_CONNECTED
+from acts.test_utils.tel.tel_defines import WFC_MODE_CELLULAR_PREFERRED
+from acts.test_utils.tel.tel_defines import WFC_MODE_WIFI_PREFERRED
+from acts.test_utils.tel.tel_defines import WIFI_WEAK_RSSI_VALUE
+from acts.test_utils.tel.tel_defines import SignalStrengthContainer
+from acts.test_utils.tel.tel_test_utils import ensure_phones_default_state
+from acts.test_utils.tel.tel_test_utils import ensure_phone_subscription
+from acts.test_utils.tel.tel_test_utils import ensure_wifi_connected
+from acts.test_utils.tel.tel_test_utils import set_wfc_mode
+from acts.test_utils.tel.tel_test_utils import wait_for_wifi_data_connection
+from acts.test_utils.tel.tel_test_utils import verify_internet_connection
+from TelLiveConnectivityMonitorBaseTest import TelLiveConnectivityMonitorBaseTest
+
+# Attenuator name
+ATTEN_NAME_FOR_WIFI_2G = 'wifi0'
+ATTEN_NAME_FOR_WIFI_5G = 'wifi1'
+ATTEN_NAME_FOR_CELL_3G = 'cell0'
+ATTEN_NAME_FOR_CELL_4G = 'cell1'
+
+# WiFi RSSI settings for ROVE_IN test
+WIFI_RSSI_FOR_ROVE_IN_TEST_PHONE_ROVE_IN = -60
+WIFI_RSSI_FOR_ROVE_IN_TEST_PHONE_NOT_ROVE_IN = -70
+
+# WiFi RSSI settings for ROVE_OUT test
+WIFI_RSSI_FOR_ROVE_OUT_TEST_PHONE_INITIAL_STATE = -60
+WIFI_RSSI_FOR_ROVE_OUT_TEST_PHONE_NOT_ROVE_OUT = -70
+WIFI_RSSI_FOR_ROVE_OUT_TEST_PHONE_ROVE_OUT = -90
+
+# WiFi RSSI settings for HAND_IN test
+WIFI_RSSI_FOR_HAND_IN_TEST_PHONE_NOT_HAND_IN = -80
+WIFI_RSSI_FOR_HAND_IN_TEST_PHONE_HAND_IN = -50
+
+# WiFi RSSI settings for HAND_OUT test
+WIFI_RSSI_FOR_HAND_OUT_TEST_PHONE_NOT_HAND_OUT = -60
+WIFI_RSSI_FOR_HAND_OUT_TEST_PHONE_HAND_OUT = -85
+
+CS_LINK_LOST = "Radio Link Lost"
+IMS_LINK_LOST = "Media Timeout"
+
+
+class TelLiveConnectivityMonitorMobilityTest(
+        TelLiveConnectivityMonitorBaseTest):
+    def __init__(self, controllers):
+        """Index attenuators by path and default each to maximum attenuation.
+
+        self.attens maps attenuator path names (wifi0/wifi1/cell0/cell1) to
+        attenuator objects so individual RF paths can be driven per test.
+        """
+        TelLiveConnectivityMonitorBaseTest.__init__(self, controllers)
+
+        self.attens = {}
+        for atten in self.attenuators:
+            self.attens[atten.path] = atten
+            atten.set_atten(atten.get_max_atten())  # Default all attens to max
+
+    def setup_class(self):
+        """Calibrate WiFi and cellular RSSI baselines for the shield box.
+
+        Connects the DUT to WiFi with all attenuators at minimum loss and
+        records the un-attenuated WiFi and LTE RSSI values; later RSSI
+        targets are computed relative to these baselines.
+
+        Returns:
+            True on successful calibration, False otherwise.
+        """
+        TelLiveConnectivityMonitorBaseTest.setup_class(self)
+
+        # Do WiFi RSSI calibration.
+        self.set_wifi_strong_cell_strong()
+
+        if not ensure_phone_subscription(self.log, self.dut):
+            self.dut.log.error("Failed to find valid subscription")
+            return False
+        if not ensure_wifi_connected(self.log, self.dut,
+                                     self.wifi_network_ssid,
+                                     self.wifi_network_pass):
+            self.dut.log.error("Fail to connect to WiFI")
+            return False
+        if (not wait_for_wifi_data_connection(self.log, self.dut, True)
+                or not verify_internet_connection(self.log, self.dut)):
+            self.dut.log.error("No Data on Wifi")
+            return False
+
+        # Delay WAIT_TIME_WIFI_RSSI_CALIBRATION_WIFI_CONNECTED after WiFi
+        # Connected to make sure WiFi RSSI reported value is correct.
+        time.sleep(WAIT_TIME_WIFI_RSSI_CALIBRATION_WIFI_CONNECTED)
+        # Turn On Screen and delay WAIT_TIME_WIFI_RSSI_CALIBRATION_SCREEN_ON
+        # then get WiFi RSSI to avoid WiFi RSSI report -127(invalid value).
+        self.dut.droid.wakeUpNow()
+        time.sleep(WAIT_TIME_WIFI_RSSI_CALIBRATION_SCREEN_ON)
+
+        setattr(self, "wifi_rssi_with_no_atten",
+                self.dut.droid.wifiGetConnectionInfo()['rssi'])
+        if self.wifi_rssi_with_no_atten == INVALID_WIFI_RSSI:
+            self.dut.log.error(
+                "Initial WiFi RSSI calibration value is wrong: -127.")
+            return False
+        self.dut.log.info("WiFi RSSI calibration info: atten=0, RSSI=%s",
+                          self.wifi_rssi_with_no_atten)
+        ensure_phones_default_state(self.log, [self.dut])
+
+        # Do Cellular RSSI calibration.
+        setattr(self, "cell_rssi_with_no_atten",
+                self.dut.droid.telephonyGetSignalStrength()[
+                    SignalStrengthContainer.SIGNAL_STRENGTH_LTE_DBM])
+        self.dut.log.info("Cellular RSSI calibration info: atten=0, RSSI=%s",
+                          self.cell_rssi_with_no_atten)
+        return True
+
+    def teardown_class(self):
+        """Stop signal-strength tracking, then run base-class teardown."""
+        self.dut.droid.telephonyStopTrackingSignalStrengthChange()
+        super().teardown_class()
+        return True
+
+    def setup_test(self):
+        """Start each test from a strong-WiFi/strong-cell RF baseline."""
+        super().setup_test()
+        self.set_wifi_strong_cell_strong()
+        return True
+
+    def teardown_test(self):
+        """Restore the strong-WiFi/strong-cell baseline after each test."""
+        super().teardown_test()
+        self.set_wifi_strong_cell_strong()
+        return True
+
+    def set_wifi_strong_cell_strong(self):
+        self.log.info("--->Setting WiFi strong cell strong<---")
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_WIFI_2G], 0,
+                 MAX_RSSI_RESERVED_VALUE)
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_WIFI_5G], 0,
+                 MAX_RSSI_RESERVED_VALUE)
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_CELL_3G], 0,
+                 MAX_RSSI_RESERVED_VALUE)
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_CELL_4G], 0,
+                 MAX_RSSI_RESERVED_VALUE)
+        return True
+
+    def set_wifi_strong_cell_weak(self):
+        self.log.info("--->Setting WiFi strong cell weak<---")
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_WIFI_2G], 0,
+                 MAX_RSSI_RESERVED_VALUE)
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_WIFI_5G], 0,
+                 MAX_RSSI_RESERVED_VALUE)
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_CELL_3G],
+                 self.cell_rssi_with_no_atten, CELL_WEAK_RSSI_VALUE)
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_CELL_4G],
+                 self.cell_rssi_with_no_atten, CELL_WEAK_RSSI_VALUE)
+        return True
+
+    def set_wifi_strong_cell_absent(self):
+        self.log.info("--->Setting WiFi strong cell absent<---")
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_WIFI_2G], 0,
+                 MAX_RSSI_RESERVED_VALUE)
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_WIFI_5G], 0,
+                 MAX_RSSI_RESERVED_VALUE)
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_CELL_3G], 0,
+                 MIN_RSSI_RESERVED_VALUE)
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_CELL_4G], 0,
+                 MIN_RSSI_RESERVED_VALUE)
+        return True
+
+    def set_wifi_weak_cell_strong(self):
+        self.log.info("--->Setting WiFi weak cell strong<---")
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_WIFI_2G],
+                 self.wifi_rssi_with_no_atten, WIFI_WEAK_RSSI_VALUE)
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_WIFI_5G],
+                 self.wifi_rssi_with_no_atten, WIFI_WEAK_RSSI_VALUE)
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_CELL_3G], 0,
+                 MAX_RSSI_RESERVED_VALUE)
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_CELL_4G], 0,
+                 MAX_RSSI_RESERVED_VALUE)
+        return True
+
+    def set_wifi_weak_cell_weak(self):
+        self.log.info("--->Setting WiFi weak cell weak<---")
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_WIFI_2G],
+                 self.wifi_rssi_with_no_atten, WIFI_WEAK_RSSI_VALUE)
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_WIFI_5G],
+                 self.wifi_rssi_with_no_atten, WIFI_WEAK_RSSI_VALUE)
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_CELL_3G],
+                 self.cell_rssi_with_no_atten, CELL_WEAK_RSSI_VALUE)
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_CELL_4G],
+                 self.cell_rssi_with_no_atten, CELL_WEAK_RSSI_VALUE)
+        return True
+
+    def set_wifi_weak_cell_absent(self):
+        self.log.info("--->Setting WiFi weak cell absent<---")
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_WIFI_2G],
+                 self.wifi_rssi_with_no_atten, WIFI_WEAK_RSSI_VALUE)
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_WIFI_5G],
+                 self.wifi_rssi_with_no_atten, WIFI_WEAK_RSSI_VALUE)
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_CELL_3G], 0,
+                 MIN_RSSI_RESERVED_VALUE)
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_CELL_4G], 0,
+                 MIN_RSSI_RESERVED_VALUE)
+        return True
+
+    def set_wifi_absent_cell_strong(self):
+        self.log.info("--->Setting WiFi absent cell strong<---")
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_WIFI_2G], 0,
+                 MIN_RSSI_RESERVED_VALUE)
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_WIFI_5G], 0,
+                 MIN_RSSI_RESERVED_VALUE)
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_CELL_3G], 0,
+                 MAX_RSSI_RESERVED_VALUE)
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_CELL_4G], 0,
+                 MAX_RSSI_RESERVED_VALUE)
+        return True
+
+    def set_wifi_absent_cell_weak(self):
+        self.log.info("--->Setting WiFi absent cell weak<---")
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_WIFI_2G], 0,
+                 MIN_RSSI_RESERVED_VALUE)
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_WIFI_5G], 0,
+                 MIN_RSSI_RESERVED_VALUE)
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_CELL_3G],
+                 self.cell_rssi_with_no_atten, CELL_WEAK_RSSI_VALUE)
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_CELL_4G],
+                 self.cell_rssi_with_no_atten, CELL_WEAK_RSSI_VALUE)
+        return True
+
+    def set_wifi_absent_cell_absent(self):
+        self.log.info("--->Setting WiFi absent cell absent<---")
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_WIFI_2G], 0,
+                 MIN_RSSI_RESERVED_VALUE)
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_WIFI_5G], 0,
+                 MIN_RSSI_RESERVED_VALUE)
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_CELL_3G], 0,
+                 MIN_RSSI_RESERVED_VALUE)
+        set_rssi(self.log, self.attens[ATTEN_NAME_FOR_CELL_4G], 0,
+                 MIN_RSSI_RESERVED_VALUE)
+        return True
+
+    """ Tests Begin """
+
+    @test_tracker_info(uuid="d474725b-c34d-4686-8b5f-c0d4733a0cc1")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_volte_call_drop_by_poor_signals(self):
+        """Drop a VoLTE call by cutting all RF, then verify monitor output."""
+        return self.forced_call_drop_test(
+            setup="volte",
+            triggers=[
+                "set_wifi_absent_cell_absent", "set_wifi_strong_cell_strong"
+            ],
+            expected_drop_reason=IMS_LINK_LOST)
+
+    @test_tracker_info(uuid="7f62f1c0-6d9e-4e7e-812f-b1c60d2f4b41")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_csfb_call_drop_by_poor_signals(self):
+        """Drop a CSFB call by cutting all RF, then verify monitor output."""
+        return self.forced_call_drop_test(
+            setup="csfb",
+            triggers=[
+                "set_wifi_absent_cell_absent", "set_wifi_strong_cell_strong"
+            ],
+            expected_drop_reason=CS_LINK_LOST)
+
+    @test_tracker_info(uuid="8d1c8c44-be54-43ec-892c-c3f41855c7c8")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_3g_call_drop_by_poor_signal(self):
+        """Drop a 3G call by cutting all RF, then verify monitor output."""
+        return self.forced_call_drop_test(
+            setup="3g",
+            triggers=[
+                "set_wifi_absent_cell_absent", "set_wifi_strong_cell_strong"
+            ],
+            expected_drop_reason=CS_LINK_LOST)
+
+    @test_tracker_info(uuid="66e01cb3-3bea-4d08-9ab4-7f22790c57b1")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_wfc_apm_call_drop_by_poor_signal(self):
+        return self.forced_call_drop_test(
+            setup="wfc_apm",
+            triggers=[
+                "set_wifi_absent_cell_absent", "set_wifi_strong_cell_strong"
+            ],
+            expected_drop_reason=IMS_LINK_LOST)
+
+    @test_tracker_info(uuid="669e9f97-6931-403a-a13d-4f179bd4406f")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_wfc_non_apm_call_drop_by_poor_signal(self):
+        self.setup_wfc_non_apm()
+        return self.forced_call_drop_test(
+            setup="wfc_non_apm",
+            triggers=[
+                "set_wifi_absent_cell_absent", "set_wifi_strong_cell_strong"
+            ],
+            expected_drop_reason=IMS_LINK_LOST)
+
+    @test_tracker_info(uuid="c7619788-2357-4c49-a754-50ffaf433d59")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_volte_handover_to_wfc_then_hangup(self):
+        self.connect_to_wifi()
+        self.enable_wfc()
+        set_wfc_mode(self.log, self.dut, WFC_MODE_CELLULAR_PREFERRED)
+        self.setup_volte()
+        return self.call_setup_and_connectivity_monitor_checking(
+            setup="volte",
+            handover="wfc_non_apm",
+            triggers=["set_wifi_strong_cell_absent"],
+            expected_drop_reason=None,
+            expected_trouble=None,
+            expected_action=None)
+
+    @test_tracker_info(uuid="c10c8406-6a0c-4039-b2ce-3782593774f2")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_csfb_handover_to_wfc_then_hangup(self):
+        self.setup_csfb()
+        self.connect_to_wifi()
+        self.enable_wfc()
+        self.set_wifi_absent_cell_strong()
+        return self.call_setup_and_connectivity_monitor_checking(
+            setup="csfb",
+            handover="wfc_non_apm",
+            triggers=[
+                "set_wifi_strong_cell_strong", "connect_to_wifi",
+                "is_wfc_enabled", "set_wifi_strong_cell_absent"
+            ],
+            expected_drop_reason=None,
+            expected_trouble=None,
+            expected_action=None)
+
+    @test_tracker_info(uuid="fcb62ea3-3a39-407c-90d8-21896c981ef4")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_3g_handover_to_wfc_then_hangup(self):
+        self.setup_3g()
+        self.connect_to_wifi()
+        self.enable_wfc()
+        set_wfc_mode(self.log, self.dut, WFC_MODE_CELLULAR_PREFERRED)
+        if not self.is_wfc_enabled():
+            self.dut.log.error("WFC is not enabled")
+            return False
+        return self.call_setup_and_connectivity_monitor_checking(
+            setup="3g",
+            handover="wfc_non_apm",
+            triggers=["set_wifi_strong_cell_absent"],
+            expected_drop_reason=None,
+            expected_trouble=None,
+            expected_action=None)
+
+    @test_tracker_info(uuid="85f32373-d1b2-4763-8812-d7ff43a9b3e6")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_wfc_handover_to_volte_then_hangup(self):
+        self.setup_volte()
+        self.connect_to_wifi()
+        self.enable_wfc()
+        self.set_wifi_strong_cell_absent()
+        if not self.is_wfc_enabled():
+            self.dut.log.error("WFC is not enabled")
+            return False
+        return self.call_setup_and_connectivity_monitor_checking(
+            setup="wfc_non_apm",
+            handover="volte",
+            triggers=[
+                "set_wifi_strong_cell_strong", "set_wifi_absent_cell_strong"
+            ],
+            expected_drop_reason=None,
+            expected_trouble=None,
+            expected_action=None)
+
+    @test_tracker_info(uuid="c3dee2ba-1637-4382-97a7-ec9ca795f3dc")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_wfc_handover_to_volte_then_call_drop(self):
+        self.setup_volte()
+        self.connect_to_wifi()
+        self.enable_wfc()
+        self.set_wifi_strong_cell_absent()
+        if not self.is_wfc_enabled():
+            self.dut.log.error("WFC is not enabled")
+            return False
+        return self.call_drop_test(
+            setup="wfc_non_apm",
+            handover="volte",
+            count=1,
+            triggers=[
+                "set_wifi_strong_cell_strong", "set_wifi_absent_cell_strong",
+                "set_wifi_absent_cell_absent"
+            ],
+            expected_drop_reason=IMS_LINK_LOST,
+            expected_trouble=None,
+            expected_action=None)
+
+    @test_tracker_info(uuid="90bc318a-b8ba-45c9-8d8f-e642eeb00460")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_wfc_handover_to_csfb_then_call_drop(self):
+        self.setup_csfb()
+        self.set_wifi_strong_cell_absent()
+        self.connect_to_wifi()
+        self.enable_wfc()
+        if not self.is_wfc_enabled():
+            self.dut.log.error("WFC is not enabled")
+            return False
+        return self.call_drop_test(
+            setup="wfc_apm",
+            handover="csfb",
+            count=1,
+            triggers=[
+                "set_wifi_strong_cell_strong", "set_wifi_absent_cell_strong",
+                "set_wifi_absent_cell_absent"
+            ],
+            expected_drop_reason=CS_LINK_LOST,
+            expected_trouble=None,
+            expected_action=None)
+
+    @test_tracker_info(uuid="0557709e-6d82-4c66-b622-6f36db8bdcc2")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_volte_handover_to_wfc_then_call_drop(self):
+        self.setup_volte()
+        self.connect_to_wifi()
+        self.enable_wfc()
+        self.set_wifi_absent_cell_strong()
+        return self.call_drop_test(
+            setup="volte",
+            handover="wfc_non_apm",
+            count=1,
+            triggers=[
+                "set_wifi_strong_cell_strong", "connect_to_wifi",
+                "is_wfc_enabled", "set_wifi_strong_cell_absent",
+                "set_wifi_absent_cell_absent"
+            ],
+            expected_drop_reason=IMS_LINK_LOST,
+            expected_trouble=None,
+            expected_action=None)
+
+    @test_tracker_info(uuid="3453ed14-8227-4050-96f1-e9ac7973df3b")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_csfb_handover_to_wfc_then_call_drop(self):
+        """CSFB call handed over to WFC, then forced to drop.
+
+        Sets up CSFB with WiFi connected and WFC enabled while cell signal
+        is strong and WiFi is absent, then runs call_drop_test through the
+        listed trigger transitions, ending with all signals absent and an
+        expected IMS_LINK_LOST drop reason.
+        """
+        self.setup_csfb()
+        self.connect_to_wifi()
+        self.enable_wfc()
+        self.set_wifi_absent_cell_strong()
+        # NOTE(review): sibling tests (volte/3g variants) pass
+        # handover="wfc_non_apm"; here it is "wfc" -- confirm "wfc" is an
+        # accepted setup key in call_drop_test and not a typo.
+        return self.call_drop_test(
+            setup="csfb",
+            handover="wfc",
+            count=1,
+            triggers=[
+                "set_wifi_strong_cell_strong", "connect_to_wifi",
+                "is_wfc_enabled", "set_wifi_strong_cell_absent",
+                "set_wifi_absent_cell_absent"
+            ],
+            expected_drop_reason=IMS_LINK_LOST,
+            expected_trouble=None,
+            expected_action=None)
+
+    @test_tracker_info(uuid="68cc68db-c60b-4c4a-a974-8e0d1fa211f2")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_3g_handover_to_wfc_then_call_drop(self):
+        self.setup_3g()
+        self.connect_to_wifi()
+        self.enable_wfc()
+        self.set_wifi_absent_cell_strong()
+        return self.call_drop_test(
+            setup="3g",
+            handover="wfc_non_apm",
+            count=1,
+            triggers=[
+                "set_wifi_strong_cell_strong", "connect_to_wifi",
+                "is_wfc_enabled", "set_wifi_strong_cell_absent",
+                "set_wifi_absent_cell_absent"
+            ],
+            expected_drop_reason=IMS_LINK_LOST,
+            expected_trouble=None,
+            expected_action=None)
+
+
+""" Tests End """
diff --git a/acts/tests/google/tel/live/TelLiveConnectivityMonitorTest.py b/acts/tests/google/tel/live/TelLiveConnectivityMonitorTest.py
new file mode 100644
index 0000000..c0f0953
--- /dev/null
+++ b/acts/tests/google/tel/live/TelLiveConnectivityMonitorTest.py
@@ -0,0 +1,622 @@
+#!/usr/bin/env python3.4
+#
+#   Copyright 2018 - Google
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+"""
+    Connectivity Monitor and Telephony Troubleshooter Tests
+"""
+
+from acts.test_decorators import test_tracker_info
+from acts.test_utils.tel.TelephonyBaseTest import TelephonyBaseTest
+from acts.test_utils.tel.tel_defines import CAPABILITY_VOLTE
+from TelLiveConnectivityMonitorBaseTest import TelLiveConnectivityMonitorBaseTest
+from TelLiveConnectivityMonitorBaseTest import ACTIONS
+from TelLiveConnectivityMonitorBaseTest import TROUBLES
+
+
+class TelLiveConnectivityMonitorTest(TelLiveConnectivityMonitorBaseTest):
+    """ Tests Begin """
+
+    @test_tracker_info(uuid="fee3d03d-701b-4727-9320-426ff6b29974")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_volte_call_drop_triggered_suggestion(self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. Force Trigger a call drop : media timeout and ensure it is
+               notified by Connectivity Monitor
+
+        Expected Results:
+            feature work fine, and does report to User about Call Drop
+
+        Returns:
+            True is pass, False if fail.
+        """
+        return self.forced_call_drop_test(setup="volte")
+
+    @test_tracker_info(uuid="8c3ee59a-74e5-4885-8f42-8a15d4550d5f")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_csfb_call_drop_triggered_suggestion(self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. Force Trigger a call drop : media timeout and ensure it is
+               notified by Connectivity Monitor
+
+        Expected Results:
+            feature work fine, and does report to User about Call Drop
+
+        Returns:
+            True is pass, False if fail.
+        """
+        return self.forced_call_drop_test(setup="csfb")
+
+    @test_tracker_info(uuid="6cd12786-c048-4925-8745-1d5d30094257")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_3g_call_drop_triggered_suggestion(self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. Force Trigger a call drop : media timeout and ensure it is
+               notified by Connectivity Monitor
+
+        Expected Results:
+            feature work fine, and does report to User about Call Drop
+
+        Returns:
+            True is pass, False if fail.
+        """
+        return self.forced_call_drop_test(setup="3g")
+
+    @test_tracker_info(uuid="51166448-cea6-480b-93d8-7063f940ce0a")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_2g_call_drop_triggered_suggestion(self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. Force Trigger a call drop : media timeout and ensure it is
+               notified by Connectivity Monitor
+
+        Expected Results:
+            feature work fine, and does report to User about Call Drop
+
+        Returns:
+            True is pass, False if fail.
+        """
+        return self.forced_call_drop_test(setup="2g")
+
+    @test_tracker_info(uuid="409f3331-5d64-4793-b300-2b3d3fa50ba5")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_wfc_apm_call_drop_triggered_suggestion(self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. Force Trigger a call drop : media timeout and ensure it is
+               notified by Connectivity Monitor
+
+        Expected Results:
+            feature work fine, and does report to User about Call Drop
+
+        Returns:
+            True is pass, False if fail.
+        """
+        return self.forced_call_drop_test(setup="wfc_apm")
+
+    @test_tracker_info(uuid="336c383f-ec19-4447-af37-7f9bb0bac4dd")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_wfc_non_apm_call_drop_triggered_suggestion(self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. Force Trigger a call drop : media timeout and ensure it is
+               notified by Connectivity Monitor
+
+        Expected Results:
+            feature work fine, and does report to User about Call Drop
+
+        Returns:
+            True is pass, False if fail.
+        """
+        return self.forced_call_drop_test(setup="wfc_non_apm")
+
+    @test_tracker_info(uuid="fd8d22ac-66b2-4e91-a922-8ecec45c85e6")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_vt_call_drop_triggered_suggestion(self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. Force Trigger a call drop : media timeout and ensure it is
+               notified by Connectivity Monitor
+
+        Expected Results:
+            feature work fine, and does report to User about Call Drop
+
+        Returns:
+            True is pass, False if fail.
+        """
+        return self.forced_call_drop_test(setup="vt")
+
+    @test_tracker_info(uuid="11c4068e-9710-4a40-8587-79d32a68a37e")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_volte_call_drop_after_user_data_wipe(self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. Force Trigger a call drop : media timeout and ensure it is
+               notified by Connectivity Monitor
+
+        Expected Results:
+            feature work fine, and does report to User about Call Drop
+
+        Returns:
+            True is pass, False if fail.
+        """
+        return self.call_drop_test_after_wipe(setup="volte")
+
+    @test_tracker_info(uuid="8c7083e1-7c06-40c9-9a58-485adceb8690")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_csfb_call_drop_after_user_data_wipe(self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. Force Trigger a call drop : media timeout and ensure it is
+               notified by Connectivity Monitor
+
+        Expected Results:
+            feature work fine, and does report to User about Call Drop
+
+        Returns:
+            True is pass, False if fail.
+        """
+        return self.call_drop_test_after_wipe(setup="csfb")
+
+    @test_tracker_info(uuid="a7938250-ea3c-4d37-85fe-72edf67c61f7")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_3g_call_drop_after_user_data_wipe(self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. Force Trigger a call drop : media timeout and ensure it is
+               notified by Connectivity Monitor
+
+        Expected Results:
+            feature work fine, and does report to User about Call Drop
+
+        Returns:
+            True is pass, False if fail.
+        """
+        return self.call_drop_test_after_wipe(setup="3g")
+
+    @test_tracker_info(uuid="24f498c4-26c5-447f-8e7d-fc3ff1d1e9d5")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_2g_call_drop_after_user_data_wipe(self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. Force Trigger a call drop : media timeout and ensure it is
+               notified by Connectivity Monitor
+
+        Expected Results:
+            feature work fine, and does report to User about Call Drop
+
+        Returns:
+            True is pass, False if fail.
+        """
+        return self.call_drop_test_after_wipe(setup="2g")
+
+    @test_tracker_info(uuid="9fd0fc1e-9480-40b7-bd6f-fe6ac95c2018")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_wfc_apm_call_drop_after_user_data_wipe(self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. Force Trigger a call drop : media timeout and ensure it is
+               notified by Connectivity Monitor
+
+        Expected Results:
+            feature work fine, and does report to User about Call Drop
+
+        Returns:
+            True is pass, False if fail.
+        """
+        return self.call_drop_test_after_wipe(setup="wfc_apm")
+
+    @test_tracker_info(uuid="8fd9f1a0-b1e0-4469-8617-608ed0682f91")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_wfc_non_apm_call_drop_after_user_data_wipe(self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. Force Trigger a call drop : media timeout and ensure it is
+               notified by Connectivity Monitor
+
+        Expected Results:
+            feature work fine, and does report to User about Call Drop
+
+        Returns:
+            True is pass, False if fail.
+        """
+        return self.call_drop_test_after_wipe(setup="wfc_non_apm")
+
+    @test_tracker_info(uuid="86056126-9c0b-4702-beb5-49b66368a806")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_vt_call_drop_after_user_data_wipe(self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. Force Trigger a call drop : media timeout and ensure it is
+               notified by Connectivity Monitor
+
+        Expected Results:
+            feature work fine, and does report to User about Call Drop
+
+        Returns:
+            True is pass, False if fail.
+        """
+        return self.call_drop_test_after_wipe(setup="vt")
+
+    @test_tracker_info(uuid="96ee7af3-96cf-48a7-958b-834684b670dc")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_stats_and_suggestion_after_reboot(self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. Force Trigger a call drop : media timeout and ensure it is
+               notified by Connectivity Monitor
+
+        Expected Results:
+            feature work fine, and does report to User about Call Drop
+
+        Returns:
+            True is pass, False if fail.
+        """
+        return self.call_drop_test_after_reboot(setup="volte")
+
+    @test_tracker_info(uuid="6b9c8f45-a3cc-4fa8-9a03-bc439ed5b415")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_call_drops_equally_across_all_types(self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. Force Trigger a call drop : media timeout and ensure it is
+               notified by Connectivity Monitor
+
+        Expected Results:
+            feature work fine, and does report to User about Call Drop
+
+        Returns:
+            True is pass, False if fail.
+        """
+        self.setup_wfc_non_apm()
+        return self.call_drop_test_after_same_type_healthy_call(
+            setup="wfc_non_apm")
+
+    @test_tracker_info(uuid="f2633204-c2ac-4c57-9465-ef6de3223de3")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_volte_call_drop_with_wifi_on_cellular_preferred(self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. Force Trigger a call drop : media timeout and ensure it is
+               notified by Connectivity Monitor
+
+        Expected Results:
+            feature work fine, and does report to User about Call Drop
+
+        Returns:
+            True is pass, False if fail.
+        """
+        self.setup_volte()
+        self.connect_to_wifi()
+        return self.call_drop_triggered_suggestion_test(setup="volte")
+
+    @test_tracker_info(uuid="ec274cb6-0b75-4026-94a7-0228a43a0f5f")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_csfb_call_drop_with_wifi_on_cellular_preferred(self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. Force Trigger a call drop : media timeout and ensure it is
+               notified by Connectivity Monitor
+
+        Expected Results:
+            feature work fine, and does report to User about Call Drop
+
+        Returns:
+            True is pass, False if fail.
+        """
+        self.setup_csfb()
+        self.connect_to_wifi()
+        return self.call_drop_triggered_suggestion_test(setup="csfb")
+
+    @test_tracker_info(uuid="b9b439c0-4200-47d6-824b-f12b64dfeecd")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_3g_call_drop_with_wifi_on_cellular_preferred(self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. Force Trigger a call drop : media timeout and ensure it is
+               notified by Connectivity Monitor
+
+        Expected Results:
+            feature work fine, and does report to User about Call Drop
+
+        Returns:
+            True is pass, False if fail.
+        """
+        self.setup_3g()
+        self.connect_to_wifi()
+        return self.call_drop_triggered_suggestion_test(setup="3g")
+
+    @test_tracker_info(uuid="a4e43270-f7fa-4709-bbe2-c7368af39227")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_wfc_non_apm_toggling_wifi_call_drop(self):
+        """Connectivity Monitor Off Test
+
+        Steps:
+            1. Verify Connectivity Monitor can be turned off
+            2. Force Trigger a call drop : media timeout and ensure it is
+               not notified by Connectivity Monitor
+
+        Expected Results:
+            feature work fine, and does not report to User about Call Drop
+
+        Returns:
+            True is pass, False if fail.
+        """
+        self.setup_volte()
+        self.setup_wfc_non_apm()
+        return self.call_setup_and_connectivity_monitor_checking(
+            setup="wfc_non_apm", triggers=["toggle_wifi", "toogle_wifi"])
+
+    @test_tracker_info(uuid="1c880cf8-082c-4451-b890-22081177d084")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_wfc_apm_call_toggling_wifi(self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. Force Trigger a call drop : media timeout and ensure it is
+               notified by Connectivity Monitor
+
+        Expected Results:
+            feature work fine, and does report to User about Call Drop
+
+        Returns:
+            True is pass, False if fail.
+        """
+        self.setup_volte()
+        self.setup_wfc_apm()
+        return self.call_setup_and_connectivity_monitor_checking(
+            setup="wfc_apm", triggers=["toggle_wifi", "toggle_wifi"])
+
+    @test_tracker_info(uuid="")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_call_drop_by_toggling_apm_with_connectivity_monitor_volte(self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. Force Trigger a call drop : media timeout and ensure it is
+               not counted as call drop by Connectivity Monitor
+
+        Expected Results:
+            feature work fine, and does report to User about Call Drop
+
+        Returns:
+            True if pass, False if fail.
+        """
+        # NOTE(review): the test_tracker_info uuid above is empty, unlike
+        # every sibling test -- fill in a tracker uuid before submitting.
+        self.setup_volte()
+        return self.call_setup_and_connectivity_monitor_checking(
+            setup="volte", triggers=["toggle_apm"])
+
+    @test_tracker_info(uuid="8e1ba024-3b43-4a7d-adc8-2252da81c55c")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_call_drop_by_toggling_apm_with_connectivity_monitor_csfb(self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. Force Trigger a call drop : media timeout and ensure it is
+               notified by Connectivity Monitor
+
+        Expected Results:
+            feature work fine, and does report to User about Call Drop
+
+        Returns:
+            True is pass, False if fail.
+        """
+        self.setup_csfb()
+        return self.call_setup_and_connectivity_monitor_checking(
+            setup="csfb", triggers=["toggle_apm"])
+
+    @test_tracker_info(uuid="fe6afae4-fa04-435f-8bbc-4a63f5fb525c")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_call_drop_by_toggling_apm_with_connectivity_monitor_3g(self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. Force Trigger a call drop : media timeout and ensure it is
+               notified by Connectivity Monitor
+
+        Expected Results:
+            feature work fine, and does report to User about Call Drop
+
+        Returns:
+            True is pass, False if fail.
+        """
+        self.setup_3g()
+        return self.call_setup_and_connectivity_monitor_checking(
+            setup="3g", triggers=["toggle_apm"])
+
+    @test_tracker_info(uuid="cc089e2b-d0e1-42a3-80de-597986be3d4e")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_call_drop_by_toggling_apm_with_connectivity_monitor_2g(self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. Force Trigger a call drop : media timeout and ensure it is
+               notified by Connectivity Monitor
+
+        Expected Results:
+            feature work fine, and does report to User about Call Drop
+
+        Returns:
+            True is pass, False if fail.
+        """
+        self.setup_2g()
+        return self.call_setup_and_connectivity_monitor_checking(
+            setup="2g", triggers=["toggle_apm"])
+
+    @test_tracker_info(uuid="f8ba9655-572c-4a90-be59-6a5bc9a8fad0")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_call_drop_by_toggling_apm_with_connectivity_monitor_wfc_apm(self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. Force Trigger a call drop : media timeout and ensure it is
+               notified by Connectivity Monitor
+
+        Expected Results:
+            feature work fine, and does report to User about Call Drop
+
+        Returns:
+            True is pass, False if fail.
+        """
+        self.setup_wfc_apm()
+        return self.call_setup_and_connectivity_monitor_checking(
+            setup="wfc_apm", triggers=["toggle_apm"])
+
+    @test_tracker_info(uuid="f2995df9-f56d-442c-977a-141e3269481f")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_call_drop_by_toggling_apm_with_connectivity_monitor_wfc_non_apm(
+            self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. Force Trigger a call drop : media timeout and ensure it is
+               notified by Connectivity Monitor
+
+        Expected Results:
+            feature work fine, and does report to User about Call Drop
+
+        Returns:
+            True is pass, False if fail.
+        """
+        self.setup_wfc_non_apm()
+        return self.call_setup_and_connectivity_monitor_checking(
+            setup="wfc_non_apm", triggers=["toggle_apm"])
+
+    @test_tracker_info(uuid="cb52110c-7470-4886-b71f-e32f0e489cbd")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_call_drop_by_toggling_apm_with_connectivity_monitor_vt(self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. Force Trigger a call drop : media timeout and ensure it is
+               notified by Connectivity Monitor
+
+        Expected Results:
+            feature work fine, and does report to User about Call Drop
+
+        Returns:
+            True is pass, False if fail.
+        """
+        self.setup_vt()
+        return self.call_setup_and_connectivity_monitor_checking(
+            setup="vt", triggers=["toggle_apm"])
+
+    @test_tracker_info(uuid="b91a1e8d-3630-4b81-bc8c-c7d3dad42c77")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_healthy_call_with_connectivity_monitor_volte(self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. No call drop during the call
+            3. Verify the call summary report
+
+        Expected Results:
+            feature work fine, and healthy call is added to report
+
+        Returns:
+            True is pass, False if fail.
+        """
+        self.setup_volte()
+        return self.healthy_call_test(setup="volte", count=1)
+
+    @test_tracker_info(uuid="2f581f6a-087f-4d12-a75c-a62778cb741b")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_healthy_call_with_connectivity_monitor_csfb(self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. Force Trigger a call drop : media timeout and ensure it is
+               notified by Connectivity Monitor
+
+        Expected Results:
+            feature work fine, and does report to User about Call Drop
+
+        Returns:
+            True is pass, False if fail.
+        """
+        self.setup_csfb()
+        return self.healthy_call_test(setup="csfb", count=1)
+
+    @test_tracker_info(uuid="a5989001-8201-4356-9903-581d0e361b38")
+    @TelephonyBaseTest.tel_test_wrap
+    def test_healthy_call_with_connectivity_monitor_wfc_apm(self):
+        """Telephony Monitor Functional Test
+
+        Steps:
+            1. Verify Connectivity Monitor is on
+            2. Make a call and hung up the call
+            3. Verify the healthy call is added to the call summary report
+
+        Expected Results:
+            feature work fine
+
+        Returns:
+            True is pass, False if fail.
+        """
+        self.setup_wfc_apm()
+        return self.healthy_call_test(setup="wfc_apm", count=1)
+
+
+""" Tests End """
diff --git a/acts/tests/google/wearables/BluetoothPairAndConnectTest.py b/acts/tests/google/wearables/BluetoothPairAndConnectTest.py
new file mode 100644
index 0000000..72dcf60
--- /dev/null
+++ b/acts/tests/google/wearables/BluetoothPairAndConnectTest.py
@@ -0,0 +1,259 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+"""Bluetooth 1st time force pair and connect test implementation."""
+# Quick way to get the Apollo serial number:
+# python3.5 -c "from acts.controllers.buds_lib.apollo_lib import get_devices; [print(d['serial_number']) for d in get_devices()]"
+
+import os
+import time
+import traceback
+import uuid
+
+from acts.base_test import BaseTestClass
+from acts.base_test import Error
+from acts.controllers.buds_lib.apollo_lib import DeviceError, ParentDevice as ApolloDevice
+#from acts.controllers.buds_lib.data_storage.bigquery import bigquery_buffer as bq
+from acts.controllers.buds_lib.test_actions.apollo_acts import ApolloTestActions
+from acts.test_utils.bt.bt_test_utils import clear_bonded_devices
+from acts.test_utils.bt.bt_test_utils import enable_bluetooth
+from acts.utils import set_location_service
+
+# Save to both x20 and big query
+DEFAULT_BIGQUERY_DATASET_ID = 'apollo'
+DEFAULT_BIGQUERY_SUMMARY_TABLE = 'bluetooth_pair_connect_summary_v0_2'
+DEFAULT_BIGQUERY_MEASUREMENT_TABLE = 'bluetooth_pair_connect_measurement_v0_2'
+
+# define CSV headers.
+APOLLO_BTSTATUS_CONVERSION = {
+    # Below is from GetConnDevices
+    'btdevs_HFP': 'HFP Pri',
+    'btdevs_A2DP': 'A2DP Pri',
+    'btdevs_RFCOMM_CTRL': 'CTRL',
+    'btdevs_RFCOMM_AUDIO': 'AUDIO',
+    'btdevs_RFCOMM_DEBUG': 'DEBUG',
+    'btdevs_RFCOMM_TRANS': 'TRANS'
+}
+
+
+def get_uuid():
+    """Get a UUID for the test."""
+    return str(uuid.uuid1())
+
+
+class BluetoothDeviceNotFound(Error):
+    pass
+
+
+class BluetoothTestException(Error):
+    pass
+
+
+class BluetoothPairAndConnectTest(BaseTestClass):
+    """Class representing a TestCase object for handling execution of tests."""
+
+    def __init__(self, configs):
+        BaseTestClass.__init__(self, configs)
+        # sanity check of the dut devices.
+        # TODO: is it possible to move this sanity check to a device config validator?
+        if not self.android_devices:
+            raise BluetoothDeviceNotFound(
+                'Cannot find android phone (need at least one).')
+        self.phone = self.android_devices[0]
+
+        if not self.buds_devices:
+            raise BluetoothDeviceNotFound(
+                'Cannot find apollo device (need at least one).')
+        self.apollo = self.buds_devices[0]
+        self.log.info('Successfully found needed devices.')
+
+        # some default values
+        self.result_name = 'Undefined'
+        self.result_path = 'Undefined'
+        self.t_test_start_time = 'Undefined'
+        self.t_test_uuid = get_uuid()
+
+        # Staging the test, create result object, etc.
+        self.apollo_act = ApolloTestActions(self.apollo, self.log)
+        self.dut_bt_addr = self.apollo.bluetooth_address
+        self.iteration = 1
+
+    def setup_test(self):
+        # Get device fw build info for output directory.
+        # TODO: find a better way to put them into a library.
+        retry = 0
+        version = 'Unknown'
+        while retry < 3:
+            try:
+                success, info = self.apollo.get_version()
+                if success and 'Fw Build Label' in info:
+                    version = info['Fw Build Label']
+                    # strip quotation
+                    if version.startswith('"') and version.endswith('"'):
+                        version = version[1:-1]
+                    break
+                else:
+                    retry += 1
+                    time.sleep(1)
+            except DeviceError:
+                self.log.warning(
+                    'Failed to read apollo build label, retrying...')
+        phone_model = self.phone.model
+        phone_os_version = self.phone.adb.getprop('ro.build.version.release')
+        t_test_start_time = time.strftime('%Y_%m_%d-%H_%M_%S')
+        self.t_test_start_time = t_test_start_time
+        result_dir = "wearables_logs"
+        result_path = os.path.join(self.log_path, result_dir)
+        self.log.info('Test result path: %s' % result_path)
+        try:
+            os.makedirs(result_path)
+        except os.error as ex:
+            self.log.warning('Cannot create result log path %s.' % result_path)
+            raise ex
+        self.result_name = result_dir
+        self.result_path = result_path
+
+        # Get the metadata
+        metadata = self.get_metadata_info()
+        # dump metadata to BQ, one record per test
+        #bq.log(DEFAULT_BIGQUERY_DATASET_ID, DEFAULT_BIGQUERY_SUMMARY_TABLE,
+        #       metadata)
+
+        # make sure bluetooth is on
+        enable_bluetooth(self.phone.droid, self.phone.ed)
+        set_location_service(self.phone, True)
+        self.log.info('===== START BLUETOOTH CONNECTION TEST  =====')
+        return True
+
+    def teardown_test(self):
+        self.log.info('Teardown test, shutting down all services...')
+        self.apollo.close()
+        return True
+
+    def test_bluetooth_connect(self):
+        """Main test method."""
+        # for now let's handle all exception here
+        is_success = False
+        try:
+            # Actual test steps:
+            clear_bonded_devices(self.phone)
+            self.apollo_act.factory_reset()
+            time.sleep(5)
+            self.phone.droid.bluetoothDiscoverAndBond(self.dut_bt_addr)
+            is_success = self.apollo_act.wait_for_bluetooth_a2dp_hfp()
+
+            # Done, write results.
+            apollo_res = self.apollo_act.measurement_timer.elapsed()
+            # TODO: Investigate import errors, skip for now
+            #phone_res = self.phone_act.measurement_timer.elapsed()
+            #self._write_results(phone_res, apollo_res)
+
+        # TODO: figure out what exception should be handled, what should be raised.
+        except DeviceError as ex:
+            # Apollo gave us an error. Report and skip to next iteration.
+            # TODO: add recovery/reset code in post test?
+            self.log.warning('Apollo reporting error: %s' % ex)
+        except Error as ex:
+            # should only catch test related exception
+            self.log.warning('Error executing test case: %s' % ex)
+        except Exception as ex:
+            # now we have a problem.
+            self.log.warning('Error executing test case: %s' % ex)
+            self.log.warning('Abort.')
+            #traceback.print_exc()
+        return is_success
+
+    def get_metadata_info(self):
+        metadata = dict()
+        metadata['uuid'] = self.t_test_uuid
+        metadata['start_time'] = self.t_test_start_time
+        # Merge device metadata into master metadata.
+        phone_metadata = self.phone.device_info
+        for key in phone_metadata:
+            metadata['phone_' + key] = phone_metadata[key]
+        apollo_metadata = self.apollo.get_info()
+        for key in apollo_metadata:
+            metadata['apollo_' + key] = apollo_metadata[key]
+        return metadata
+
+    def _write_results(self, phone_res, apollo_res):
+        """Custom logic to parse and save the time measurements.
+
+        Save the measurements to x20 and big query.
+
+        Args:
+          phone_res: time measurement from the phone, should only contain bond time
+          apollo_res: time measurements from Apollo, should contain profile
+                      connection time.
+        """
+        all_cols = []
+        all_vals = []
+        # profile connect time. Add header text conversion here.
+        sorted_header_keys = sorted(APOLLO_BTSTATUS_CONVERSION.keys())
+        for key in sorted_header_keys:
+            # header names in CSV
+            all_cols.append(key)
+            profile_name = APOLLO_BTSTATUS_CONVERSION[key]
+            if profile_name in apollo_res:
+                all_vals.append(apollo_res[profile_name])
+            else:
+                all_vals.append(0)
+
+        # Now get all bond/connect time.
+        all_conn_time = max(all_vals)
+        all_cols.insert(0, 'all_connect')
+        all_vals.insert(0, all_conn_time)
+
+        if 'bond' in phone_res:
+            all_bond_time = phone_res['bond']
+            self.log.info('bond %f' % all_bond_time)
+        else:
+            all_bond_time = 0
+            self.log.warning('Cannot find bond time, set bond time to 0.')
+        all_cols.insert(0, 'all_bond')
+        all_vals.insert(0, all_bond_time)
+
+        all_cols.insert(0, 'Timestamps')
+        all_vals.insert(0, time.strftime('%Y_%m_%d-%H_%M_%S'))
+        all_cols.insert(0, 'Iteration')
+        all_vals.insert(0, self.iteration)
+
+        # Write to BQ
+        res_dict = dict(zip(all_cols, all_vals))
+        res_dict['uuid'] = self.t_test_uuid
+        #bq.log(DEFAULT_BIGQUERY_DATASET_ID, DEFAULT_BIGQUERY_MEASUREMENT_TABLE,
+        #       res_dict)
+
+        # Now write to x20.
+        res_path = os.path.join(self.result_path, 'bt_time_record.csv')
+        # write the header only when creating new file.
+        write_header = False
+        if not os.path.isfile(res_path):
+            write_header = True
+        try:
+            self.log.info('Writing to %s...' % res_path)
+            self.log.info(','.join(all_cols))
+            self.log.info(','.join(str(x) for x in all_vals))
+
+            with open(res_path, 'ab') as file_handle:
+                if write_header:
+                    file_handle.write(','.join(all_cols))
+                    file_handle.write('\n')
+                file_handle.write(','.join(str(x) for x in all_vals))
+                file_handle.write('\n')
+            self.log.info('Result file updated in x20.')
+        except IOError as ex:
+            self.log.warning(ex.message)
+            raise ex
diff --git a/acts/tests/google/wearables/BluetoothReconnectTest.py b/acts/tests/google/wearables/BluetoothReconnectTest.py
new file mode 100644
index 0000000..8c32250
--- /dev/null
+++ b/acts/tests/google/wearables/BluetoothReconnectTest.py
@@ -0,0 +1,98 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+"""Bluetooth disconnect and reconnect verification."""
+# Quick way to get the Apollo serial number:
+# python3.5 -c "from acts.controllers.buds_lib.apollo_lib import get_devices; [print(d['serial_number']) for d in get_devices()]"
+
+import time
+from acts import asserts
+from acts.base_test import BaseTestClass
+from acts.controllers.buds_lib.test_actions.apollo_acts import ApolloTestActions
+from acts.test_utils.bt.bt_test_utils import clear_bonded_devices
+from acts.test_utils.bt.bt_test_utils import disable_bluetooth
+from acts.test_utils.bt.bt_test_utils import enable_bluetooth
+from acts.utils import set_location_service
+
+class BluetoothReconnectTest(BaseTestClass):
+    """Class representing a TestCase object for handling execution of tests."""
+
+    # TODO: add ACTS style metrics logging
+    def __init__(self, configs):
+        BaseTestClass.__init__(self, configs)
+        # sanity check of the dut devices.
+        # TODO(b/119051823): Investigate using a config validator to replace this.
+        if not self.android_devices:
+            raise ValueError(
+                'Cannot find android phone (need at least one).')
+        self.phone = self.android_devices[0]
+
+        if not self.buds_devices:
+            raise ValueError(
+                'Cannot find apollo device (need at least one).')
+        self.apollo = self.buds_devices[0]
+        self.log.info('Successfully found needed devices.')
+
+        # Staging the test, create result object, etc.
+        self.apollo_act = ApolloTestActions(self.apollo, self.log)
+        self.dut_bt_addr = self.apollo.bluetooth_address
+
+    def setup_test(self):
+        # Make sure bluetooth is on
+        enable_bluetooth(self.phone.droid, self.phone.ed)
+        set_location_service(self.phone, True)
+        self.log.info('===== START BLUETOOTH RECONNECT TEST  =====')
+        return True
+
+    def teardown_test(self):
+        self.log.info('Teardown test, shutting down all services...')
+        self.apollo.close()
+        return True
+
+    def test_bluetooth_reconnect_after_android_disconnect(self):
+        """Main test method."""
+        # Make sure devices are paired and connected
+        clear_bonded_devices(self.phone)
+        self.apollo_act.factory_reset()
+
+        # Buffer between reset and pairing
+        time.sleep(5)
+
+        self.phone.droid.bluetoothDiscoverAndBond(self.dut_bt_addr)
+        paired_and_connected = self.apollo_act.wait_for_bluetooth_a2dp_hfp()
+        asserts.assert_true(paired_and_connected,
+                            "Failed to pair and connect devices")
+
+        # Disconnect Bluetooth from the phone side
+        self.log.info("Disabling Bluetooth on phone")
+        bluetooth_disabled = disable_bluetooth(self.phone.droid)
+        asserts.assert_true(bluetooth_disabled,
+                            "Failed to disconnect Bluetooth from phone")
+        self.log.info("Bluetooth disabled on phone")
+
+        # Buffer between disconnect and reconnect
+        time.sleep(5)
+
+        # Reconnect Bluetooth from the phone side
+        self.log.info("Enabling Bluetooth on phone")
+        bluetooth_enabled = enable_bluetooth(self.phone.droid, self.phone.ed)
+        asserts.assert_true(bluetooth_enabled,
+                            "Failed to reconnect Bluetooth from phone")
+        self.log.info("Bluetooth enabled on phone")
+
+        # Verify that the devices have reconnected
+        devices_reconnected = self.apollo_act.wait_for_bluetooth_a2dp_hfp()
+        asserts.assert_true(devices_reconnected,
+                            "Bluetooth profiles failed to reconnect")
\ No newline at end of file
diff --git a/acts/tests/google/wifi/OWNERS b/acts/tests/google/wifi/OWNERS
new file mode 100644
index 0000000..7e868cf
--- /dev/null
+++ b/acts/tests/google/wifi/OWNERS
@@ -0,0 +1,6 @@
+bmahadev@google.com
+etancohen@google.com
+krisr@google.com
+mplass@google.com
+rpius@google.com
+satk@google.com
diff --git a/acts/tests/google/wifi/WifiAutoUpdateTest.py b/acts/tests/google/wifi/WifiAutoUpdateTest.py
index f9e5caa..04fb850 100755
--- a/acts/tests/google/wifi/WifiAutoUpdateTest.py
+++ b/acts/tests/google/wifi/WifiAutoUpdateTest.py
@@ -81,12 +81,13 @@
 
         self.wifi_config_list = []
 
+        # Disabling WiFi setup before OTA for debugging.
         # Setup WiFi and add few open and wpa networks before OTA.
-        self.add_network_and_enable(self.open_network[0]['2g'])
-        self.add_network_and_enable(self.reference_networks[0]['5g'])
+        # self.add_network_and_enable(self.open_network[0]['2g'])
+        # self.add_network_and_enable(self.reference_networks[0]['5g'])
 
         # Add few dummy networks to the list.
-        self.add_and_enable_dummy_networks()
+        # self.add_and_enable_dummy_networks()
 
         # Run OTA below, if ota fails then abort all tests.
         try:
diff --git a/acts/tests/google/wifi/WifiPasspointTest.py b/acts/tests/google/wifi/WifiPasspointTest.py
index b8be0b2..2466260 100755
--- a/acts/tests/google/wifi/WifiPasspointTest.py
+++ b/acts/tests/google/wifi/WifiPasspointTest.py
@@ -27,13 +27,18 @@
 from acts import asserts
 from acts import signals
 from acts.test_decorators import test_tracker_info
+from acts.test_utils.tel.tel_test_utils import get_operator_name
 from acts.utils import force_airplane_mode
 
 WifiEnums = wutils.WifiEnums
 
 DEFAULT_TIMEOUT = 10
+OSU_TEST_TIMEOUT = 300
 GLOBAL_RE = 0
 BOINGO = 1
+ATT = 2
+
+OSU_BOINGO = 0
 UNKNOWN_FQDN = "@#@@!00fffffx"
 
 class WifiPasspointTest(acts.base_test.BaseTestClass):
@@ -189,11 +194,12 @@
         6. Ensure all Passpoint configurations can be deleted.
 
         """
-        for passpoint_config in self.passpoint_networks:
+        for passpoint_config in self.passpoint_networks[:2]:
             self.install_passpoint_profile(passpoint_config)
             time.sleep(DEFAULT_TIMEOUT)
         configs = self.dut.droid.getPasspointConfigs()
-        if not len(configs) or len(configs) != len(self.passpoint_networks):
+        #  It is length -1 because ATT profile will be handled separately
+        if not len(configs) or len(configs) != len(self.passpoint_networks[:2]):
             raise signals.TestFailure("Failed to fetch some or all of the"
                                       " configured passpoint networks.")
         for config in configs:
@@ -230,7 +236,7 @@
         """
         # Install both Passpoint profiles on the device.
         passpoint_ssid = list()
-        for passpoint_config in self.passpoint_networks:
+        for passpoint_config in self.passpoint_networks[:2]:
             passpoint_ssid.append(passpoint_config[WifiEnums.SSID_KEY])
             self.install_passpoint_profile(passpoint_config)
             time.sleep(DEFAULT_TIMEOUT)
@@ -248,7 +254,7 @@
             expected_ssid = self.passpoint_networks[1][WifiEnums.SSID_KEY]
 
         # Remove the current Passpoint profile.
-        for network in self.passpoint_networks:
+        for network in self.passpoint_networks[:2]:
             if network[WifiEnums.SSID_KEY] == current_ssid:
                 if not wutils.delete_passpoint(self.dut, network["fqdn"]):
                     raise signals.TestFailure("Failed to delete Passpoint"
@@ -265,3 +271,73 @@
         # Delete the remaining Passpoint profile.
         self.get_configured_passpoint_and_delete()
         wutils.wait_for_disconnect(self.dut)
+
+    @test_tracker_info(uuid="e3e826d2-7c39-4c37-ab3f-81992d5aa0e8")
+    def test_att_passpoint_network(self):
+        """Add an AT&T Passpoint network and verify device connects to it.
+
+        Steps:
+            1. Install an AT&T Passpoint Profile.
+            2. Verify the device connects to the required Passpoint SSID.
+            3. Get the Passpoint configuration added above.
+            4. Delete Passpoint configuration using its FQDN.
+            5. Verify that we are disconnected from the Passpoint network.
+
+        """
+        carriers = ["att"]
+        operator = get_operator_name(self.log, self.dut)
+        asserts.skip_if(operator not in carriers,
+                        "Device %s does not have a ATT sim" % self.dut.model)
+
+        passpoint_config = self.passpoint_networks[ATT]
+        self.install_passpoint_profile(passpoint_config)
+        ssid = passpoint_config[WifiEnums.SSID_KEY]
+        self.check_passpoint_connection(ssid)
+        self.get_configured_passpoint_and_delete()
+        wutils.wait_for_disconnect(self.dut)
+
+    def test_start_subscription_provisioning(self):
+        """Start subscription provisioning with a default provider."""
+
+        self.unpack_userparams(('osu_configs',))
+        asserts.assert_true(
+            len(self.osu_configs) > 0,
+            "Need at least one osu config.")
+        osu_config = self.osu_configs[OSU_BOINGO]
+        # Clear all previous events.
+        self.dut.ed.clear_all_events()
+        self.dut.droid.startSubscriptionProvisioning(osu_config)
+        start_time = time.time()
+        while time.time() < start_time + OSU_TEST_TIMEOUT:
+            dut_event = self.dut.ed.pop_event("onProvisioningCallback",
+                                              DEFAULT_TIMEOUT * 18)
+            if dut_event['data']['tag'] == 'success':
+                self.log.info("Passpoint Provisioning Success")
+                break
+            if dut_event['data']['tag'] == 'failure':
+                raise signals.TestFailure(
+                    "Passpoint Provisioning is failed with %s" %
+                    dut_event['data'][
+                        'reason'])
+                break
+            if dut_event['data']['tag'] == 'status':
+                self.log.info(
+                    "Passpoint Provisioning status %s" % dut_event['data'][
+                        'status'])
+
+        # Clear all previous events.
+        self.dut.ed.clear_all_events()
+
+        # Verify device connects to the Passpoint network.
+        time.sleep(DEFAULT_TIMEOUT)
+
+        current_passpoint = self.dut.droid.wifiGetConnectionInfo()
+        if current_passpoint[WifiEnums.SSID_KEY] not in osu_config[
+            "expected_ssids"]:
+            raise signals.TestFailure("Device did not connect to the %s"
+                                      " passpoint network" % osu_config[
+                                          "expected_ssids"])
+
+        # Delete the Passpoint profile.
+        self.get_configured_passpoint_and_delete()
+        wutils.wait_for_disconnect(self.dut)
diff --git a/acts/tests/google/wifi/WifiSoftApTest.py b/acts/tests/google/wifi/WifiSoftApTest.py
index 2b5a495..f3ed3e3 100644
--- a/acts/tests/google/wifi/WifiSoftApTest.py
+++ b/acts/tests/google/wifi/WifiSoftApTest.py
@@ -43,7 +43,7 @@
         """
         self.dut = self.android_devices[0]
         self.dut_client = self.android_devices[1]
-        req_params = []
+        req_params = ["dbs_supported_models"]
         opt_param = ["open_network"]
         self.unpack_userparams(
             req_param_names=req_params, opt_param_names=opt_param)
@@ -69,6 +69,15 @@
         asserts.assert_equal(self.dut_client.droid.wifiGetVerboseLoggingLevel(), 1,
             "Failed to enable WiFi verbose logging on the client dut.")
         wutils.wifi_toggle_state(self.dut_client, True)
+        if len(self.android_devices) > 2:
+            utils.sync_device_time(self.android_devices[2])
+            self.android_devices[2].droid.wifiSetCountryCode(wutils.WifiEnums.CountryCode.US)
+            self.android_devices[2].droid.wifiEnableVerboseLogging(1)
+            asserts.assert_equal(self.android_devices[2].droid.wifiGetVerboseLoggingLevel(), 1,
+                "Failed to enable WiFi verbose logging on the client dut.")
+        # Disable wifi for dbs supported models
+        if self.dut.model in self.dbs_supported_models:
+            wutils.wifi_toggle_state(self.dut, False)
 
     def teardown_class(self):
         wutils.stop_wifi_tethering(self.dut)
@@ -78,6 +87,10 @@
             del self.user_params["reference_networks"]
             del self.user_params["open_network"]
 
+    def teardown_test(self):
+        if self.dut.droid.wifiIsApEnabled():
+            wutils.stop_wifi_tethering(self.dut)
+
     def on_fail(self, test_name, begin_time):
         self.dut.take_bug_report(test_name, begin_time)
         self.dut_client.take_bug_report(test_name, begin_time)
@@ -151,7 +164,7 @@
                                 "Failed to enable cell data for softap dut.")
 
     def validate_full_tether_startup(self, band=None, hidden=None,
-                                     test_clients=None):
+                                     test_ping=False, test_clients=None):
         """Test full startup of wifi tethering
 
         1. Report current state.
@@ -181,8 +194,13 @@
             asserts.assert_true(ret != -1, "Add network %r failed" % config)
             self.dut_client.droid.wifiEnableNetwork(ret, 0)
         self.confirm_softap_in_scan_results(config[wutils.WifiEnums.SSID_KEY])
+        if test_ping:
+            self.validate_ping_between_softap_and_client(config)
         if test_clients:
-            self.validate_traffic_between_softap_clients(config)
+            if hasattr(self, 'arduino_wifi_dongles'):
+                self.validate_traffic_between_softap_clients(config)
+            if len(self.android_devices) > 2:
+                self.validate_ping_between_two_clients(config)
         wutils.stop_wifi_tethering(self.dut)
         asserts.assert_false(self.dut.droid.wifiIsApEnabled(),
                              "SoftAp is still reported as running")
@@ -191,6 +209,61 @@
         elif self.dut.droid.wifiCheckState():
             asserts.fail("Wifi was disabled before softap and now it is enabled")
 
+    def validate_ping_between_softap_and_client(self, config):
+        """Test ping between softap and its client.
+
+        Connect one android device to the wifi hotspot.
+        Verify they can ping each other.
+
+        Args:
+            config: wifi network config with SSID, password
+        """
+        wutils.wifi_connect(self.dut_client, config, check_connectivity=False)
+
+        dut_ip = self.dut.droid.connectivityGetIPv4Addresses("wlan0")[0]
+        dut_client_ip = self.dut_client.droid.connectivityGetIPv4Addresses("wlan0")[0]
+
+        self.dut.log.info("Try to ping %s" % dut_client_ip)
+        asserts.assert_true(
+            utils.adb_shell_ping(self.dut, count=10, dest_ip=dut_client_ip, timeout=20),
+            "%s ping %s failed" % (self.dut.serial, dut_client_ip))
+
+        self.dut_client.log.info("Try to ping %s" % dut_ip)
+        asserts.assert_true(
+            utils.adb_shell_ping(self.dut_client, count=10, dest_ip=dut_ip, timeout=20),
+            "%s ping %s failed" % (self.dut_client.serial, dut_ip))
+
+        wutils.stop_wifi_tethering(self.dut)
+
+    def validate_ping_between_two_clients(self, config):
+        """Test ping between softap's clients.
+
+        Connect two android device to the wifi hotspot.
+        Verify the clients can ping each other.
+
+        Args:
+            config: wifi network config with SSID, password
+        """
+        # Connect DUT to Network
+        ad1 = self.dut_client
+        ad2 = self.android_devices[2]
+
+        wutils.wifi_connect(ad1, config, check_connectivity=False)
+        wutils.wifi_connect(ad2, config, check_connectivity=False)
+        ad1_ip = ad1.droid.connectivityGetIPv4Addresses('wlan0')[0]
+        ad2_ip = ad2.droid.connectivityGetIPv4Addresses('wlan0')[0]
+
+        # Ping each other
+        ad1.log.info("Try to ping %s" % ad2_ip)
+        asserts.assert_true(
+            utils.adb_shell_ping(ad1, count=10, dest_ip=ad2_ip, timeout=20),
+            "%s ping %s failed" % (ad1.serial, ad2_ip))
+
+        ad2.log.info("Try to ping %s" % ad1_ip)
+        asserts.assert_true(
+            utils.adb_shell_ping(ad2, count=10, dest_ip=ad1_ip, timeout=20),
+            "%s ping %s failed" % (ad2.serial, ad1_ip))
+
     """ Tests Begin """
 
     @test_tracker_info(uuid="495f1252-e440-461c-87a7-2c45f369e129")
@@ -321,6 +394,27 @@
         wutils.stop_wifi_tethering(self.dut)
         wutils.wait_for_disconnect(self.dut_client)
 
+    @test_tracker_info(uuid="")
+    def test_full_tether_startup_2G_with_airplane_mode_on(self):
+        """Test full startup of wifi tethering in 2G band with
+        airplane mode on.
+
+        1. Turn on airplane mode.
+        2. Report current state.
+        3. Switch to AP mode.
+        4. verify SoftAP active.
+        5. Shutdown wifi tethering.
+        6. verify back to previous mode.
+        7. Turn off airplane mode.
+        """
+        self.dut.log.debug("Toggling Airplane mode ON.")
+        asserts.assert_true(utils.force_airplane_mode(self.dut, True),
+                            "Can not turn on airplane mode: %s" % self.dut.serial)
+        self.validate_full_tether_startup(WIFI_CONFIG_APBAND_2G)
+        self.dut.log.debug("Toggling Airplane mode OFF.")
+        asserts.assert_true(utils.force_airplane_mode(self.dut, False),
+                            "Can not turn off airplane mode: %s" % self.dut.serial)
+
     @test_tracker_info(uuid="05c6f929-7754-477f-a9cd-f77e850b818b")
     def test_full_tether_startup_2G_multiple_clients(self):
         """Test full startup of wifi tethering in 2G band, connect clients
@@ -339,6 +433,52 @@
         self.validate_full_tether_startup(WIFI_CONFIG_APBAND_2G,
                                           test_clients=True)
 
+    @test_tracker_info(uuid="883dd5b1-50c6-4958-a50f-bb4bea77ccaf")
+    def test_full_tether_startup_2G_one_client_ping_softap(self):
+        """(AP) 1 Device can connect to 2G hotspot
+
+        Steps:
+        1. Turn on DUT's 2G softap
+        2. Client connects to the softap
+        3. Client and DUT ping each other
+        """
+        self.validate_full_tether_startup(WIFI_CONFIG_APBAND_2G, test_ping=True)
+
+    @test_tracker_info(uuid="6604e848-99d6-422c-9fdc-2882642438b6")
+    def test_full_tether_startup_5G_one_client_ping_softap(self):
+        """(AP) 1 Device can connect to 5G hotspot
+
+        Steps:
+        1. Turn on DUT's 5G softap
+        2. Client connects to the softap
+        3. Client and DUT ping each other
+        """
+        self.validate_full_tether_startup(WIFI_CONFIG_APBAND_5G, test_ping=True)
+
+    @test_tracker_info(uuid="17725ecd-f900-4cf7-8b2d-d7515b0a595c")
+    def test_softap_2G_two_clients_ping_each_other(self):
+        """Test for 2G hotspot with 2 clients
+
+        1. Turn on 2G hotspot
+        2. Two clients connect to the hotspot
+        3. Two clients ping each other
+        """
+        asserts.skip_if(len(self.android_devices) < 3,
+                        "No extra android devices. Skip test")
+        self.validate_full_tether_startup(WIFI_CONFIG_APBAND_2G, test_clients=True)
+
+    @test_tracker_info(uuid="98c09888-1021-4f79-9065-b3cf9b132146")
+    def test_softap_5G_two_clients_ping_each_other(self):
+        """Test for 5G hotspot with 2 clients
+
+        1. Turn on 5G hotspot
+        2. Two clients connect to the hotspot
+        3. Two clients ping each other
+        """
+        asserts.skip_if(len(self.android_devices) < 3,
+                        "No extra android devices. Skip test")
+        self.validate_full_tether_startup(WIFI_CONFIG_APBAND_5G, test_clients=True)
+
     """ Tests End """
 
 
diff --git a/acts/tests/google/wifi/WifiStressTest.py b/acts/tests/google/wifi/WifiStressTest.py
index 0157c8c..1ac785b 100755
--- a/acts/tests/google/wifi/WifiStressTest.py
+++ b/acts/tests/google/wifi/WifiStressTest.py
@@ -25,6 +25,8 @@
 from acts import signals
 from acts import utils
 from acts.test_decorators import test_tracker_info
+from acts.test_utils.bt.bt_test_utils import enable_bluetooth
+from acts.test_utils.bt.bt_test_utils import disable_bluetooth
 from acts.test_utils.wifi.WifiBaseTest import WifiBaseTest
 WifiEnums = wutils.WifiEnums
 
@@ -53,7 +55,7 @@
         req_params = []
         opt_param = [
             "open_network", "reference_networks", "iperf_server_address",
-            "stress_count", "stress_hours"]
+            "stress_count", "stress_hours", "attn_vals", "pno_interval"]
         self.unpack_userparams(
             req_param_names=req_params, opt_param_names=opt_param)
 
@@ -78,6 +80,8 @@
         self.dut.droid.wakeUpNow()
 
     def teardown_test(self):
+        if self.dut.droid.wifiIsApEnabled():
+            wutils.stop_wifi_tethering(self.dut)
         self.dut.droid.wakeLockRelease()
         self.dut.droid.goToSleepNow()
         wutils.reset_wifi(self.dut)
@@ -159,6 +163,90 @@
             raise signals.TestFailure("Youtube video did not start. Current WiFi "
                 "state is %d" % self.dut.droid.wifiCheckState())
 
+    def add_networks(self, ad, networks):
+        """Add Wi-Fi networks to an Android device and verify the networks were
+        added correctly.
+
+        Args:
+            ad: the AndroidDevice object to add networks to.
+            networks: a list of dicts, each dict represents a Wi-Fi network.
+        """
+        for network in networks:
+            ret = ad.droid.wifiAddNetwork(network)
+            asserts.assert_true(ret != -1, "Failed to add network %s" %
+                                network)
+            ad.droid.wifiEnableNetwork(ret, 0)
+        configured_networks = ad.droid.wifiGetConfiguredNetworks()
+        self.log.debug("Configured networks: %s", configured_networks)
+
+    def connect_and_verify_connected_bssid(self, expected_bssid):
+        """Start a scan to get the DUT connected to an AP and verify the DUT
+        is connected to the correct BSSID.
+
+        Args:
+            expected_bssid: BSSID of the network the DUT is expected to connect to.
+
+        Raises:
+            signals.TestFailure if the DUT did not connect to expected_bssid.
+        """
+        #force start a single scan so we don't have to wait for the
+        #WCM scheduled scan.
+        wutils.start_wifi_connection_scan(self.dut)
+        #wait for connection
+        time.sleep(20)
+        #verify connection
+        actual_network = self.dut.droid.wifiGetConnectionInfo()
+        self.log.info("Actual network: %s", actual_network)
+        try:
+            asserts.assert_equal(expected_bssid,
+                                 actual_network[WifiEnums.BSSID_KEY])
+        except Exception:
+            msg = "Device did not connect to any network."
+            raise signals.TestFailure(msg)
+
+    def set_attns(self, attn_val_name):
+        """Sets attenuation values on attenuators used in this test.
+
+        Args:
+            attn_val_name: Name of the attenuation value pair to use.
+        """
+        self.log.info("Set attenuation values to %s", self.attn_vals[attn_val_name])
+        try:
+            self.attenuators[0].set_atten(self.attn_vals[attn_val_name][0])
+            self.attenuators[1].set_atten(self.attn_vals[attn_val_name][1])
+            self.attenuators[2].set_atten(95)
+            self.attenuators[3].set_atten(95)
+        except:
+            self.log.error("Failed to set attenuation values %s.", attn_val_name)
+            raise
+
+    def trigger_pno_and_assert_connect(self, attn_val_name, expected_con):
+        """Sets attenuators to disconnect current connection to trigger PNO.
+        Validate that the DUT connected to the new SSID as expected after PNO.
+
+        Args:
+            attn_val_name: Name of the attenuation value pair to use.
+            expected_con: Info of the network we expect the DUT to roam to
+                after PNO triggers.
+        """
+        connection_info = self.dut.droid.wifiGetConnectionInfo()
+        self.log.info("Triggering PNO connect from %s to %s",
+                      connection_info[WifiEnums.SSID_KEY],
+                      expected_con[WifiEnums.SSID_KEY])
+        self.set_attns(attn_val_name)
+        self.log.info("Wait %ss for PNO to trigger.", self.pno_interval)
+        time.sleep(self.pno_interval)
+        try:
+            self.log.info("Connected to %s network after PNO interval"
+                          % self.dut.droid.wifiGetConnectionInfo())
+            expected_ssid = expected_con[WifiEnums.SSID_KEY]
+            verify_con = {WifiEnums.SSID_KEY: expected_ssid}
+            wutils.verify_wifi_connection_info(self.dut, verify_con)
+            self.log.info("Connected to %s successfully after PNO",
+                          expected_ssid)
+        finally:
+            pass
+
     """Tests"""
 
     @test_tracker_info(uuid="cd0016c6-58cf-4361-b551-821c0b8d2554")
@@ -181,6 +269,28 @@
         raise signals.TestPass(details="", extras={"Iterations":"%d" %
             self.stress_count, "Pass":"%d" %(count+1)})
 
+    @test_tracker_info(uuid="4e591cec-9251-4d52-bc6e-6621507524dc")
+    def test_stress_toggle_wifi_state_bluetooth_on(self):
+        """Toggle WiFi state ON and OFF for N times when bluetooth ON."""
+        enable_bluetooth(self.dut.droid, self.dut.ed)
+        for count in range(self.stress_count):
+            """Test toggling wifi"""
+            try:
+                self.log.debug("Going from on to off.")
+                wutils.wifi_toggle_state(self.dut, False)
+                self.log.debug("Going from off to on.")
+                startTime = time.time()
+                wutils.wifi_toggle_state(self.dut, True)
+                startup_time = time.time() - startTime
+                self.log.debug("WiFi was enabled on the device in %s s." %
+                    startup_time)
+            except Exception:
+                raise signals.TestFailure(details="", extras={"Iterations":"%d" %
+                    self.stress_count, "Pass":"%d" %count})
+        disable_bluetooth(self.dut.droid)
+        raise signals.TestPass(details="", extras={"Iterations":"%d" %
+            self.stress_count, "Pass":"%d" %(count+1)})
+
     @test_tracker_info(uuid="49e3916a-9580-4bf7-a60d-a0f2545dcdde")
     def test_stress_connect_traffic_disconnect_5g(self):
         """Test to connect and disconnect from a network for N times.
@@ -301,6 +411,7 @@
                 ret = self.dut.droid.wifiAddNetwork(network)
                 asserts.assert_true(ret != -1, "Add network %r failed" % network)
                 self.dut.droid.wifiEnableNetwork(ret, 0)
+            self.dut.droid.wifiStartScan()
             time.sleep(WAIT_FOR_AUTO_CONNECT)
             cur_network = self.dut.droid.wifiGetConnectionInfo()
             cur_ssid = cur_network[WifiEnums.SSID_KEY]
@@ -346,6 +457,9 @@
         self.dut.log.info("softap setup: %s %s", ap_ssid, ap_password)
         config = {wutils.WifiEnums.SSID_KEY: ap_ssid}
         config[wutils.WifiEnums.PWD_KEY] = ap_password
+        # Set country code explicitly to "US".
+        self.dut.droid.wifiSetCountryCode(wutils.WifiEnums.CountryCode.US)
+        self.dut_client.droid.wifiSetCountryCode(wutils.WifiEnums.CountryCode.US)
         for count in range(self.stress_count):
             initial_wifi_state = self.dut.droid.wifiCheckState()
             wutils.start_wifi_tethering(self.dut,
@@ -392,3 +506,48 @@
         raise signals.TestPass(details="", extras={"Iterations":"%d" %
             self.stress_count, "Pass":"%d" %((count+1)*2)})
 
+    @test_tracker_info(uuid="e8ae8cd2-c315-4c08-9eb3-83db65b78a58")
+    def test_stress_network_selector_2G_connection(self):
+        """
+            1. Add one saved 2G network to DUT.
+            2. Move the DUT in range.
+            3. Verify the DUT is connected to the network.
+            4. Move the DUT out of range
+            5. Repeat step 2-4
+        """
+        for attenuator in self.attenuators:
+            attenuator.set_atten(95)
+
+        # add a saved network to DUT
+        networks = [self.reference_networks[0]['2g']]
+        self.add_networks(self.dut, networks)
+
+        for count in range(self.stress_count):
+            # move the DUT in range
+            self.attenuators[0].set_atten(0)
+            # verify
+            self.connect_and_verify_connected_bssid(self.reference_networks[0]['2g']['bssid'])
+            # move the DUT out of range
+            self.attenuators[0].set_atten(95)
+        raise signals.TestPass(details="", extras={"Iterations":"%d" %
+            self.stress_count, "Pass":"%d" %(count+1)})
+
+    @test_tracker_info(uuid="5d5d14cb-3cd1-4b3d-8c04-0d6f4b764b6b")
+    def test_stress_pno_connection_to_2g(self):
+        """Test PNO triggered autoconnect to a network for N times
+
+        Steps:
+        1. Save 2Ghz valid network configuration in the device.
+        2. Attenuate 5Ghz network and wait for a few seconds to trigger PNO.
+        3. Check the device connected to 2Ghz network automatically.
+        4. Repeat step 2-3
+        """
+        networks = [self.reference_networks[0]['2g']]
+        self.add_networks(self.dut, networks)
+        for count in range(self.stress_count):
+            self.trigger_pno_and_assert_connect("a_on_b_off", self.reference_networks[0]['2g'])
+            self.set_attns("b_on_a_off")
+            time.sleep(10)
+        wutils.set_attns(self.attenuators, "default")
+        raise signals.TestPass(details="", extras={"Iterations":"%d" %
+            self.stress_count, "Pass":"%d" %(count+1)})
diff --git a/acts/tests/google/wifi/WifiWakeTest.py b/acts/tests/google/wifi/WifiWakeTest.py
new file mode 100644
index 0000000..9327597
--- /dev/null
+++ b/acts/tests/google/wifi/WifiWakeTest.py
@@ -0,0 +1,321 @@
+#!/usr/bin/env python3.4
+#
+#   Copyright 2018 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+import time
+import queue
+
+from acts import asserts
+from acts.test_utils.wifi.WifiBaseTest import WifiBaseTest
+import acts.test_utils.wifi.wifi_test_utils as wutils
+import acts.utils
+
+CONSECUTIVE_MISSED_SCANS_REQUIRED_TO_EVICT = 5
+LAST_DISCONNECT_TIMEOUT_MILLIS = 5000
+LAST_DISCONNECT_TIMEOUT_SEC = LAST_DISCONNECT_TIMEOUT_MILLIS / 1000
+PRESCAN_DELAY_SEC = 5
+
+
+class WifiWakeTest(WifiBaseTest):
+    """
+    Tests Wifi Wake.
+
+    Test Bed Requirements:
+    * One Android Device
+    * Two APs that can be turned on and off
+    """
+
+    def __init__(self, controllers):
+        super().__init__(controllers)
+
+    def setup_class(self):
+        self.dut = self.android_devices[0]
+        wutils.wifi_test_device_init(self.dut)
+        # turn location back on
+        acts.utils.set_location_service(self.dut, True)
+        self.dut.droid.wifiScannerToggleAlwaysAvailable(True)
+
+        self.unpack_userparams(req_param_names=[],
+                               opt_param_names=["reference_networks"])
+
+        if "AccessPoint" in self.user_params:
+            self.legacy_configure_ap_and_start(mirror_ap=False, ap_count=2)
+
+        # use 2G since Wifi Wake does not work if an AP is on a 5G DFS channel
+        self.ap_a = self.reference_networks[0]["2g"]
+        self.ap_b = self.reference_networks[1]["2g"]
+
+        self.ap_a_atten = self.attenuators[0]
+        self.ap_b_atten = self.attenuators[2]
+
+    # TODO(b/119040540): this method of disabling/re-enabling Wifi on APs is
+    # hacky, switch to using public methods when they are implemented
+    def ap_a_off(self):
+        ap_a_hostapd = self.access_points[0]._aps['wlan0'].hostapd
+        if ap_a_hostapd.is_alive():
+            ap_a_hostapd.stop()
+            self.log.info('Turned AP A off')
+
+    def ap_a_on(self):
+        ap_a_hostapd = self.access_points[0]._aps['wlan0'].hostapd
+        if not ap_a_hostapd.is_alive():
+            ap_a_hostapd.start(ap_a_hostapd.config)
+            self.log.info('Turned AP A on')
+
+    def ap_b_off(self):
+        ap_b_hostapd = self.access_points[1]._aps['wlan0'].hostapd
+        if ap_b_hostapd.is_alive():
+            ap_b_hostapd.stop()
+            self.log.info('Turned AP B off')
+
+    def ap_b_on(self):
+        ap_b_hostapd = self.access_points[1]._aps['wlan0'].hostapd
+        if not ap_b_hostapd.is_alive():
+            ap_b_hostapd.start(ap_b_hostapd.config)
+            self.log.info('Turned AP B on')
+
+    def setup_test(self):
+        self.dut.droid.wakeLockAcquireBright()
+        self.dut.droid.wakeUpNow()
+        self.ap_a_on()
+        self.ap_b_on()
+        self.ap_a_atten.set_atten(0)
+        self.ap_b_atten.set_atten(0)
+        wutils.reset_wifi(self.dut)
+        wutils.wifi_toggle_state(self.dut, new_state=True)
+        # clear events from event dispatcher
+        self.dut.droid.wifiStartTrackingStateChange()
+        self.dut.droid.wifiStopTrackingStateChange()
+        self.dut.ed.clear_all_events()
+
+    def teardown_test(self):
+        self.dut.droid.wakeLockRelease()
+        self.dut.droid.goToSleepNow()
+
+    def on_fail(self, test_name, begin_time):
+        self.dut.take_bug_report(test_name, begin_time)
+        self.dut.cat_adb_log(test_name, begin_time)
+
+    def do_location_scan(self, num_times=1):
+        scan_settings = {
+            "band": wutils.WifiEnums.WIFI_BAND_BOTH,
+            "periodInMs": 0,
+            "reportEvents": wutils.WifiEnums.REPORT_EVENT_AFTER_EACH_SCAN
+        }
+
+        wifi_chs = wutils.WifiChannelUS(self.dut.model)
+        stime_channel = 47  # dwell time plus 2ms
+        leeway = 10
+
+        for i in range(num_times):
+            self.log.info("Scan count: {}".format(i))
+            data = wutils.start_wifi_single_scan(self.dut, scan_settings)
+            idx = data["Index"]
+            scan_rt = data["ScanElapsedRealtime"]
+            self.log.debug(
+                "Wifi single shot scan started index: %s at real time: %s", idx,
+                scan_rt)
+            # generating event wait time from scan setting plus leeway
+            scan_time, scan_channels = wutils.get_scan_time_and_channels(
+                wifi_chs, scan_settings, stime_channel)
+            wait_time = int(scan_time / 1000) + leeway
+            # track number of result received
+            result_received = 0
+            try:
+                for _ in range(1, 3):
+                    event_name = "{}{}onResults".format("WifiScannerScan", idx)
+                    self.log.debug("Waiting for event: %s for time %s",
+                                   event_name, wait_time)
+                    event = self.dut.ed.pop_event(event_name, wait_time)
+                    self.log.debug("Event received: %s", event)
+                    result_received += 1
+            except queue.Empty as error:
+                asserts.assert_true(
+                    result_received >= 1,
+                    "Event did not triggered for single shot {}".format(error))
+            finally:
+                self.dut.droid.wifiScannerStopScan(idx)
+                # For single shot number of result received and length of result
+                # should be one
+                asserts.assert_true(
+                    result_received == 1,
+                    "Test fail because received result {}".format(
+                        result_received))
+
+    def test_no_reconnect_manual_disable_wifi(self):
+        """
+        Tests that Wifi Wake does not reconnect to a network if the user turned
+        off Wifi while connected to that network and the user has not moved
+        (i.e. moved out of range of the AP then came back).
+        """
+        wutils.wifi_connect(self.dut, self.ap_a, num_of_tries=5)
+        wutils.wifi_toggle_state(self.dut, new_state=False)
+        time.sleep(PRESCAN_DELAY_SEC)
+        self.do_location_scan(
+            2 * CONSECUTIVE_MISSED_SCANS_REQUIRED_TO_EVICT + 2)
+        asserts.assert_false(
+            self.dut.droid.wifiCheckState(),
+            "Expect Wifi Wake to not enable Wifi, but Wifi was enabled.")
+
+    def test_reconnect_wifi_saved_network(self):
+        """Tests that Wifi Wake re-enables Wifi for a saved network."""
+        wutils.wifi_connect(self.dut, self.ap_a, num_of_tries=5)
+        wutils.wifi_connect(self.dut, self.ap_b, num_of_tries=5)
+        self.dut.ed.clear_all_events()
+        self.ap_a_off()
+        self.ap_b_off()
+        wutils.wait_for_disconnect(self.dut)
+        self.log.info("Wifi Disconnected")
+        time.sleep(LAST_DISCONNECT_TIMEOUT_SEC * 1.2)
+        wutils.wifi_toggle_state(self.dut, new_state=False)
+        time.sleep(PRESCAN_DELAY_SEC)
+        self.do_location_scan(CONSECUTIVE_MISSED_SCANS_REQUIRED_TO_EVICT + 2)
+
+        self.ap_a_on()
+        self.do_location_scan()
+        asserts.assert_true(
+            self.dut.droid.wifiCheckState(),
+            "Expect Wifi Wake to enable Wifi, but Wifi is disabled.")
+
+    def test_reconnect_wifi_move_back_in_range(self):
+        """
+        Tests that Wifi Wake re-enables Wifi if the device moves out of range of
+        the AP then came back.
+        """
+        wutils.wifi_connect(self.dut, self.ap_a, num_of_tries=5)
+        wutils.wifi_toggle_state(self.dut, new_state=False)
+        time.sleep(PRESCAN_DELAY_SEC)
+        # init Wakeup Lock with AP A
+        self.do_location_scan(CONSECUTIVE_MISSED_SCANS_REQUIRED_TO_EVICT + 2)
+        self.ap_a_off()
+        # evict AP A from Wakeup Lock
+        self.do_location_scan(CONSECUTIVE_MISSED_SCANS_REQUIRED_TO_EVICT + 2)
+        self.ap_a_on()
+        self.do_location_scan()
+        asserts.assert_true(
+            self.dut.droid.wifiCheckState(),
+            "Expect Wifi Wake to enable Wifi, but Wifi is disabled.")
+
+    def test_no_reconnect_to_flaky_ap(self):
+        """
+        Tests that Wifi Wake does not reconnect to flaky networks.
+        If a network sporadically connects and disconnects, and the user turns
+        off Wifi even during the disconnected phase, Wifi Wake should not
+        re-enable Wifi for that network.
+        """
+        wutils.wifi_connect(self.dut, self.ap_a, num_of_tries=5)
+        self.ap_a_off()
+        time.sleep(LAST_DISCONNECT_TIMEOUT_SEC * 0.4)
+        wutils.wifi_toggle_state(self.dut, new_state=False)
+        time.sleep(PRESCAN_DELAY_SEC)
+        self.do_location_scan(CONSECUTIVE_MISSED_SCANS_REQUIRED_TO_EVICT + 2)
+        self.ap_a_on()
+        self.do_location_scan(
+            2 * CONSECUTIVE_MISSED_SCANS_REQUIRED_TO_EVICT + 2)
+        asserts.assert_false(
+            self.dut.droid.wifiCheckState(),
+            "Expect Wifi Wake to not enable Wifi, but Wifi was enabled.")
+
+    def test_reconnect_wifi_disabled_after_disconnecting(self):
+        """
+        Tests that Wifi Wake reconnects to a network if Wifi was disabled long
+        after disconnecting from a network.
+        """
+        wutils.wifi_connect(self.dut, self.ap_a, num_of_tries=5)
+        self.dut.ed.clear_all_events()
+        self.ap_a_off()
+        wutils.wait_for_disconnect(self.dut)
+        self.log.info("Wifi Disconnected")
+        time.sleep(LAST_DISCONNECT_TIMEOUT_SEC * 1.2)
+        wutils.wifi_toggle_state(self.dut, new_state=False)
+        time.sleep(PRESCAN_DELAY_SEC)
+        self.do_location_scan(CONSECUTIVE_MISSED_SCANS_REQUIRED_TO_EVICT + 2)
+        self.ap_a_on()
+        self.do_location_scan()
+        asserts.assert_true(
+            self.dut.droid.wifiCheckState(),
+            "Expect Wifi Wake to enable Wifi, but Wifi is disabled.")
+
+    def test_no_reconnect_if_exists_ap_in_wakeup_lock(self):
+        """
+        2 APs in Wakeup Lock, user moves out of range of one AP but stays in
+        range of the other, should not reconnect when user moves back in range
+        of both.
+        """
+        wutils.wifi_connect(self.dut, self.ap_a, num_of_tries=5)
+        wutils.wifi_connect(self.dut, self.ap_b, num_of_tries=5)
+        wutils.wifi_toggle_state(self.dut, new_state=False)
+        time.sleep(PRESCAN_DELAY_SEC)
+        self.do_location_scan(CONSECUTIVE_MISSED_SCANS_REQUIRED_TO_EVICT + 2)
+        self.ap_b_off()
+        self.do_location_scan(CONSECUTIVE_MISSED_SCANS_REQUIRED_TO_EVICT + 2)
+        self.ap_b_on()
+        self.do_location_scan(CONSECUTIVE_MISSED_SCANS_REQUIRED_TO_EVICT + 2)
+        asserts.assert_false(
+            self.dut.droid.wifiCheckState(),
+            "Expect Wifi Wake to not enable Wifi, but Wifi was enabled.")
+
+    def test_reconnect_if_both_ap_evicted_from_wakeup_lock(self):
+        """
+        2 APs in Wakeup Lock, user moves out of range of both APs, should
+        reconnect when user moves back in range of either AP.
+        """
+        wutils.wifi_connect(self.dut, self.ap_a, num_of_tries=5)
+        wutils.wifi_connect(self.dut, self.ap_b, num_of_tries=5)
+        wutils.wifi_toggle_state(self.dut, new_state=False)
+        time.sleep(PRESCAN_DELAY_SEC)
+        self.do_location_scan(CONSECUTIVE_MISSED_SCANS_REQUIRED_TO_EVICT + 2)
+        self.ap_a_off()
+        self.ap_b_off()
+        self.do_location_scan(CONSECUTIVE_MISSED_SCANS_REQUIRED_TO_EVICT + 2)
+        self.ap_a_on()
+        self.do_location_scan()
+        asserts.assert_true(
+            self.dut.droid.wifiCheckState(),
+            "Expect Wifi Wake to enable Wifi, but Wifi is disabled.")
+
+    def test_reconnect_to_better_saved_network(self):
+        """
+        2 saved APs, one attenuated, one unattenuated, Wifi Wake should connect
+        to the unattenuated AP
+        """
+        wutils.wifi_connect(self.dut, self.ap_a, num_of_tries=5)
+        wutils.wifi_connect(self.dut, self.ap_b, num_of_tries=5)
+        self.dut.ed.clear_all_events()
+        self.ap_a_off()
+        self.ap_b_off()
+        wutils.wait_for_disconnect(self.dut)
+        self.log.info("Wifi Disconnected")
+        time.sleep(LAST_DISCONNECT_TIMEOUT_SEC * 1.2)
+        wutils.wifi_toggle_state(self.dut, new_state=False)
+        time.sleep(PRESCAN_DELAY_SEC)
+        self.do_location_scan(CONSECUTIVE_MISSED_SCANS_REQUIRED_TO_EVICT + 2)
+
+        self.ap_a_on()
+        self.ap_b_on()
+        self.ap_a_atten.set_atten(30)
+        self.ap_b_atten.set_atten(0)
+
+        self.do_location_scan()
+        asserts.assert_true(
+            self.dut.droid.wifiCheckState(),
+            "Expect Wifi Wake to enable Wifi, but Wifi is disabled.")
+        expected_ssid = self.ap_b[wutils.WifiEnums.SSID_KEY]
+        actual_ssid = self.dut.droid.wifiGetConnectionInfo()[
+            wutils.WifiEnums.SSID_KEY]
+        asserts.assert_equal(
+            expected_ssid, actual_ssid,
+            ("Expected to connect to SSID '{}', but actually connected to "
+             "'{}' instead.").format(expected_ssid, actual_ssid))
diff --git a/acts/tests/google/wifi/aware/functional/AttachTest.py b/acts/tests/google/wifi/aware/functional/AttachTest.py
index 37f07e0..167c29f 100644
--- a/acts/tests/google/wifi/aware/functional/AttachTest.py
+++ b/acts/tests/google/wifi/aware/functional/AttachTest.py
@@ -26,142 +26,139 @@
 
 
 class AttachTest(AwareBaseTest):
-  def __init__(self, controllers):
-    AwareBaseTest.__init__(self, controllers)
+    def __init__(self, controllers):
+        AwareBaseTest.__init__(self, controllers)
 
-  @test_tracker_info(uuid="cdafd1e0-bcf5-4fe8-ae32-f55483db9925")
-  def test_attach(self):
-    """Functional test case / Attach test cases / attach
+    @test_tracker_info(uuid="cdafd1e0-bcf5-4fe8-ae32-f55483db9925")
+    def test_attach(self):
+        """Functional test case / Attach test cases / attach
 
     Validates that attaching to the Wi-Fi Aware service works (receive
     the expected callback).
     """
-    dut = self.android_devices[0]
-    dut.droid.wifiAwareAttach(False)
-    autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
-    autils.fail_on_event(dut, aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
+        dut = self.android_devices[0]
+        dut.droid.wifiAwareAttach(False)
+        autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
+        autils.fail_on_event(dut, aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
 
-  @test_tracker_info(uuid="82f2a8bc-a62b-49c2-ac8a-fe8460010ba2")
-  def test_attach_with_identity(self):
-    """Functional test case / Attach test cases / attach with identity callback
+    @test_tracker_info(uuid="82f2a8bc-a62b-49c2-ac8a-fe8460010ba2")
+    def test_attach_with_identity(self):
+        """Functional test case / Attach test cases / attach with identity callback
 
     Validates that attaching to the Wi-Fi Aware service works (receive
     the expected callbacks).
     """
-    dut = self.android_devices[0]
-    dut.droid.wifiAwareAttach(True)
-    autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
-    autils.wait_for_event(dut, aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
+        dut = self.android_devices[0]
+        dut.droid.wifiAwareAttach(True)
+        autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
+        autils.wait_for_event(dut, aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
 
-  @test_tracker_info(uuid="d2714d14-f330-47d4-b8e9-ee4d5e5b7ea0")
-  def test_attach_multiple_sessions(self):
-    """Functional test case / Attach test cases / multiple attach sessions
+    @test_tracker_info(uuid="d2714d14-f330-47d4-b8e9-ee4d5e5b7ea0")
+    def test_attach_multiple_sessions(self):
+        """Functional test case / Attach test cases / multiple attach sessions
 
     Validates that when creating multiple attach sessions each can be
     configured independently as to whether or not to receive an identity
     callback.
     """
-    dut = self.android_devices[0]
+        dut = self.android_devices[0]
 
-    # Create 3 attach sessions: 2 without identity callback, 1 with
-    id1 = dut.droid.wifiAwareAttach(False, None, True)
-    time.sleep(10) # to make sure all calls and callbacks are done
-    id2 = dut.droid.wifiAwareAttach(True, None, True)
-    time.sleep(10) # to make sure all calls and callbacks are done
-    id3 = dut.droid.wifiAwareAttach(False, None, True)
-    dut.log.info('id1=%d, id2=%d, id3=%d', id1, id2, id3)
+        # Create 3 attach sessions: 2 without identity callback, 1 with
+        id1 = dut.droid.wifiAwareAttach(False, None, True)
+        time.sleep(10)  # to make sure all calls and callbacks are done
+        id2 = dut.droid.wifiAwareAttach(True, None, True)
+        time.sleep(10)  # to make sure all calls and callbacks are done
+        id3 = dut.droid.wifiAwareAttach(False, None, True)
+        dut.log.info('id1=%d, id2=%d, id3=%d', id1, id2, id3)
 
-    # Attach session 1: wait for attach, should not get identity
-    autils.wait_for_event(dut,
-                          autils.decorate_event(aconsts.EVENT_CB_ON_ATTACHED,
-                                                id1))
-    autils.fail_on_event(dut,
-                         autils.decorate_event(
-                             aconsts.EVENT_CB_ON_IDENTITY_CHANGED, id1))
+        # Attach session 1: wait for attach, should not get identity
+        autils.wait_for_event(
+            dut, autils.decorate_event(aconsts.EVENT_CB_ON_ATTACHED, id1))
+        autils.fail_on_event(
+            dut,
+            autils.decorate_event(aconsts.EVENT_CB_ON_IDENTITY_CHANGED, id1))
 
-    # Attach session 2: wait for attach and for identity callback
-    autils.wait_for_event(dut,
-                          autils.decorate_event(aconsts.EVENT_CB_ON_ATTACHED,
-                                                id2))
-    autils.wait_for_event(dut,
-                          autils.decorate_event(
-                              aconsts.EVENT_CB_ON_IDENTITY_CHANGED, id2))
+        # Attach session 2: wait for attach and for identity callback
+        autils.wait_for_event(
+            dut, autils.decorate_event(aconsts.EVENT_CB_ON_ATTACHED, id2))
+        autils.wait_for_event(
+            dut,
+            autils.decorate_event(aconsts.EVENT_CB_ON_IDENTITY_CHANGED, id2))
 
-    # Attach session 3: wait for attach, should not get identity
-    autils.wait_for_event(dut,
-                          autils.decorate_event(aconsts.EVENT_CB_ON_ATTACHED,
-                                                id3))
-    autils.fail_on_event(dut,
-                         autils.decorate_event(
-                             aconsts.EVENT_CB_ON_IDENTITY_CHANGED, id3))
+        # Attach session 3: wait for attach, should not get identity
+        autils.wait_for_event(
+            dut, autils.decorate_event(aconsts.EVENT_CB_ON_ATTACHED, id3))
+        autils.fail_on_event(
+            dut,
+            autils.decorate_event(aconsts.EVENT_CB_ON_IDENTITY_CHANGED, id3))
 
-  @test_tracker_info(uuid="b8ea4d02-ae23-42a7-a85e-def52932c858")
-  def test_attach_with_no_wifi(self):
-    """Function test case / Attach test cases / attempt to attach with wifi off
+    @test_tracker_info(uuid="b8ea4d02-ae23-42a7-a85e-def52932c858")
+    def test_attach_with_no_wifi(self):
+        """Function test case / Attach test cases / attempt to attach with wifi off
 
     Validates that if trying to attach with Wi-Fi disabled will receive the
     expected failure callback. As a side-effect also validates that the
     broadcast for Aware unavailable is received.
     """
-    dut = self.android_devices[0]
-    wutils.wifi_toggle_state(dut, False)
-    autils.wait_for_event(dut, aconsts.BROADCAST_WIFI_AWARE_NOT_AVAILABLE)
-    dut.droid.wifiAwareAttach()
-    autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACH_FAILED)
+        dut = self.android_devices[0]
+        wutils.wifi_toggle_state(dut, False)
+        autils.wait_for_event(dut, aconsts.BROADCAST_WIFI_AWARE_NOT_AVAILABLE)
+        dut.droid.wifiAwareAttach()
+        autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACH_FAILED)
 
-  @test_tracker_info(uuid="7dcc4530-c936-4447-9d22-a7c5b315e2ce")
-  def test_attach_with_doze(self):
-    """Function test case / Attach test cases / attempt to attach with doze on
+    @test_tracker_info(uuid="7dcc4530-c936-4447-9d22-a7c5b315e2ce")
+    def test_attach_with_doze(self):
+        """Function test case / Attach test cases / attempt to attach with doze on
 
     Validates that if trying to attach with device in doze mode will receive the
     expected failure callback. As a side-effect also validates that the
     broadcast for Aware unavailable is received.
     """
-    dut = self.android_devices[0]
-    asserts.assert_true(utils.enable_doze(dut), "Can't enable doze")
-    autils.wait_for_event(dut, aconsts.BROADCAST_WIFI_AWARE_NOT_AVAILABLE)
-    dut.droid.wifiAwareAttach()
-    autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACH_FAILED)
-    asserts.assert_true(utils.disable_doze(dut), "Can't disable doze")
-    autils.wait_for_event(dut, aconsts.BROADCAST_WIFI_AWARE_AVAILABLE)
+        dut = self.android_devices[0]
+        asserts.assert_true(utils.enable_doze(dut), "Can't enable doze")
+        autils.wait_for_event(dut, aconsts.BROADCAST_WIFI_AWARE_NOT_AVAILABLE)
+        dut.droid.wifiAwareAttach()
+        autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACH_FAILED)
+        asserts.assert_true(utils.disable_doze(dut), "Can't disable doze")
+        autils.wait_for_event(dut, aconsts.BROADCAST_WIFI_AWARE_AVAILABLE)
 
-  @test_tracker_info(uuid="2574fd01-8974-4dd0-aeb8-a7194461140e")
-  def test_attach_with_location_off(self):
-    """Function test case / Attach test cases / attempt to attach with location
+    @test_tracker_info(uuid="2574fd01-8974-4dd0-aeb8-a7194461140e")
+    def test_attach_with_location_off(self):
+        """Function test case / Attach test cases / attempt to attach with location
     mode off.
 
     Validates that if trying to attach with device location mode off will
     receive the expected failure callback. As a side-effect also validates that
     the broadcast for Aware unavailable is received.
     """
-    dut = self.android_devices[0]
-    utils.set_location_service(dut, False)
-    autils.wait_for_event(dut, aconsts.BROADCAST_WIFI_AWARE_NOT_AVAILABLE)
-    dut.droid.wifiAwareAttach()
-    autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACH_FAILED)
-    utils.set_location_service(dut, True)
-    autils.wait_for_event(dut, aconsts.BROADCAST_WIFI_AWARE_AVAILABLE)
+        dut = self.android_devices[0]
+        utils.set_location_service(dut, False)
+        autils.wait_for_event(dut, aconsts.BROADCAST_WIFI_AWARE_NOT_AVAILABLE)
+        dut.droid.wifiAwareAttach()
+        autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACH_FAILED)
+        utils.set_location_service(dut, True)
+        autils.wait_for_event(dut, aconsts.BROADCAST_WIFI_AWARE_AVAILABLE)
 
-  @test_tracker_info(uuid="7ffde8e7-a010-4b77-97f5-959f263b5249")
-  def test_attach_apm_toggle_attach_again(self):
-    """Validates that enabling Airplane mode while Aware is on resets it
+    @test_tracker_info(uuid="7ffde8e7-a010-4b77-97f5-959f263b5249")
+    def test_attach_apm_toggle_attach_again(self):
+        """Validates that enabling Airplane mode while Aware is on resets it
     correctly, and allows it to be re-enabled when Airplane mode is then
     disabled."""
-    dut = self.android_devices[0]
+        dut = self.android_devices[0]
 
-    # enable Aware (attach)
-    dut.droid.wifiAwareAttach()
-    autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
+        # enable Aware (attach)
+        dut.droid.wifiAwareAttach()
+        autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
 
-    # enable airplane mode
-    utils.force_airplane_mode(dut, True)
-    autils.wait_for_event(dut, aconsts.BROADCAST_WIFI_AWARE_NOT_AVAILABLE)
+        # enable airplane mode
+        utils.force_airplane_mode(dut, True)
+        autils.wait_for_event(dut, aconsts.BROADCAST_WIFI_AWARE_NOT_AVAILABLE)
 
-    # wait a few seconds and disable airplane mode
-    time.sleep(10)
-    utils.force_airplane_mode(dut, False)
-    autils.wait_for_event(dut, aconsts.BROADCAST_WIFI_AWARE_AVAILABLE)
+        # wait a few seconds and disable airplane mode
+        time.sleep(10)
+        utils.force_airplane_mode(dut, False)
+        autils.wait_for_event(dut, aconsts.BROADCAST_WIFI_AWARE_AVAILABLE)
 
-    # try enabling Aware again (attach)
-    dut.droid.wifiAwareAttach()
-    autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
+        # try enabling Aware again (attach)
+        dut.droid.wifiAwareAttach()
+        autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
diff --git a/acts/tests/google/wifi/aware/functional/CapabilitiesTest.py b/acts/tests/google/wifi/aware/functional/CapabilitiesTest.py
index b9b6108..ea5b867 100644
--- a/acts/tests/google/wifi/aware/functional/CapabilitiesTest.py
+++ b/acts/tests/google/wifi/aware/functional/CapabilitiesTest.py
@@ -24,16 +24,16 @@
 
 
 class CapabilitiesTest(AwareBaseTest):
-  """Set of tests for Wi-Fi Aware Capabilities - verifying that the provided
+    """Set of tests for Wi-Fi Aware Capabilities - verifying that the provided
   capabilities are real (i.e. available)."""
 
-  SERVICE_NAME = "GoogleTestXYZ"
+    SERVICE_NAME = "GoogleTestXYZ"
 
-  def __init__(self, controllers):
-    AwareBaseTest.__init__(self, controllers)
+    def __init__(self, controllers):
+        AwareBaseTest.__init__(self, controllers)
 
-  def create_config(self, dtype, service_name):
-    """Create a discovery configuration based on input parameters.
+    def create_config(self, dtype, service_name):
+        """Create a discovery configuration based on input parameters.
 
     Args:
       dtype: Publish or Subscribe discovery type
@@ -42,14 +42,14 @@
     Returns:
       Discovery configuration object.
     """
-    config = {}
-    config[aconsts.DISCOVERY_KEY_DISCOVERY_TYPE] = dtype
-    config[aconsts.DISCOVERY_KEY_SERVICE_NAME] = service_name
-    return config
+        config = {}
+        config[aconsts.DISCOVERY_KEY_DISCOVERY_TYPE] = dtype
+        config[aconsts.DISCOVERY_KEY_SERVICE_NAME] = service_name
+        return config
 
-  def start_discovery_session(self, dut, session_id, is_publish, dtype,
-                              service_name, expect_success):
-    """Start a discovery session
+    def start_discovery_session(self, dut, session_id, is_publish, dtype,
+                                service_name, expect_success):
+        """Start a discovery session
 
     Args:
       dut: Device under test
@@ -62,85 +62,86 @@
     Returns:
       Discovery session ID.
     """
-    config = {}
-    config[aconsts.DISCOVERY_KEY_DISCOVERY_TYPE] = dtype
-    config[aconsts.DISCOVERY_KEY_SERVICE_NAME] = service_name
+        config = {}
+        config[aconsts.DISCOVERY_KEY_DISCOVERY_TYPE] = dtype
+        config[aconsts.DISCOVERY_KEY_SERVICE_NAME] = service_name
 
-    if is_publish:
-      disc_id = dut.droid.wifiAwarePublish(session_id, config)
-      event_name = aconsts.SESSION_CB_ON_PUBLISH_STARTED
-    else:
-      disc_id = dut.droid.wifiAwareSubscribe(session_id, config)
-      event_name = aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED
+        if is_publish:
+            disc_id = dut.droid.wifiAwarePublish(session_id, config)
+            event_name = aconsts.SESSION_CB_ON_PUBLISH_STARTED
+        else:
+            disc_id = dut.droid.wifiAwareSubscribe(session_id, config)
+            event_name = aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED
 
-    if expect_success:
-      autils.wait_for_event(dut, event_name)
-    else:
-      autils.wait_for_event(dut, aconsts.SESSION_CB_ON_SESSION_CONFIG_FAILED)
+        if expect_success:
+            autils.wait_for_event(dut, event_name)
+        else:
+            autils.wait_for_event(dut,
+                                  aconsts.SESSION_CB_ON_SESSION_CONFIG_FAILED)
 
-    return disc_id
+        return disc_id
 
-  ###############################
+    ###############################
 
-  @test_tracker_info(uuid="45da8a41-6c02-4434-9eb9-aa0a36ff9f65")
-  def test_max_discovery_sessions(self):
-    """Validate that the device can create as many discovery sessions as are
+    @test_tracker_info(uuid="45da8a41-6c02-4434-9eb9-aa0a36ff9f65")
+    def test_max_discovery_sessions(self):
+        """Validate that the device can create as many discovery sessions as are
     indicated in the device capabilities
     """
-    dut = self.android_devices[0]
+        dut = self.android_devices[0]
 
-    # attach
-    session_id = dut.droid.wifiAwareAttach(True)
-    autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
+        # attach
+        session_id = dut.droid.wifiAwareAttach(True)
+        autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
 
-    service_name_template = 'GoogleTestService-%s-%d'
+        service_name_template = 'GoogleTestService-%s-%d'
 
-    # start the max number of publish sessions
-    for i in range(dut.aware_capabilities[aconsts.CAP_MAX_PUBLISHES]):
-      # create publish discovery session of both types
-      pub_disc_id = self.start_discovery_session(
-          dut, session_id, True, aconsts.PUBLISH_TYPE_UNSOLICITED
-          if i % 2 == 0 else aconsts.PUBLISH_TYPE_SOLICITED,
-          service_name_template % ('pub', i), True)
+        # start the max number of publish sessions
+        for i in range(dut.aware_capabilities[aconsts.CAP_MAX_PUBLISHES]):
+            # create publish discovery session of both types
+            pub_disc_id = self.start_discovery_session(
+                dut, session_id, True, aconsts.PUBLISH_TYPE_UNSOLICITED
+                if i % 2 == 0 else aconsts.PUBLISH_TYPE_SOLICITED,
+                service_name_template % ('pub', i), True)
 
-    # start the max number of subscribe sessions
-    for i in range(dut.aware_capabilities[aconsts.CAP_MAX_SUBSCRIBES]):
-      # create publish discovery session of both types
-      sub_disc_id = self.start_discovery_session(
-          dut, session_id, False, aconsts.SUBSCRIBE_TYPE_PASSIVE
-          if i % 2 == 0 else aconsts.SUBSCRIBE_TYPE_ACTIVE,
-          service_name_template % ('sub', i), True)
+        # start the max number of subscribe sessions
+        for i in range(dut.aware_capabilities[aconsts.CAP_MAX_SUBSCRIBES]):
+            # create publish discovery session of both types
+            sub_disc_id = self.start_discovery_session(
+                dut, session_id, False, aconsts.SUBSCRIBE_TYPE_PASSIVE
+                if i % 2 == 0 else aconsts.SUBSCRIBE_TYPE_ACTIVE,
+                service_name_template % ('sub', i), True)
 
-    # start another publish & subscribe and expect failure
-    self.start_discovery_session(dut, session_id, True,
-                                 aconsts.PUBLISH_TYPE_UNSOLICITED,
-                                 service_name_template % ('pub', 900), False)
-    self.start_discovery_session(dut, session_id, False,
-                                 aconsts.SUBSCRIBE_TYPE_ACTIVE,
-                                 service_name_template % ('pub', 901), False)
+        # start another publish & subscribe and expect failure
+        self.start_discovery_session(
+            dut, session_id, True, aconsts.PUBLISH_TYPE_UNSOLICITED,
+            service_name_template % ('pub', 900), False)
+        self.start_discovery_session(
+            dut, session_id, False, aconsts.SUBSCRIBE_TYPE_ACTIVE,
+            service_name_template % ('pub', 901), False)
 
-    # delete one of the publishes and try again (see if can create subscribe
-    # instead - should not)
-    dut.droid.wifiAwareDestroyDiscoverySession(pub_disc_id)
-    self.start_discovery_session(dut, session_id, False,
-                                 aconsts.SUBSCRIBE_TYPE_ACTIVE,
-                                 service_name_template % ('pub', 902), False)
-    self.start_discovery_session(dut, session_id, True,
-                                 aconsts.PUBLISH_TYPE_UNSOLICITED,
-                                 service_name_template % ('pub', 903), True)
+        # delete one of the publishes and try again (see if can create subscribe
+        # instead - should not)
+        dut.droid.wifiAwareDestroyDiscoverySession(pub_disc_id)
+        self.start_discovery_session(
+            dut, session_id, False, aconsts.SUBSCRIBE_TYPE_ACTIVE,
+            service_name_template % ('pub', 902), False)
+        self.start_discovery_session(
+            dut, session_id, True, aconsts.PUBLISH_TYPE_UNSOLICITED,
+            service_name_template % ('pub', 903), True)
 
-    # delete one of the subscribes and try again (see if can create publish
-    # instead - should not)
-    dut.droid.wifiAwareDestroyDiscoverySession(sub_disc_id)
-    self.start_discovery_session(dut, session_id, True,
-                                 aconsts.PUBLISH_TYPE_UNSOLICITED,
-                                 service_name_template % ('pub', 904), False)
-    self.start_discovery_session(dut, session_id, False,
-                                 aconsts.SUBSCRIBE_TYPE_ACTIVE,
-                                 service_name_template % ('pub', 905), True)
+        # delete one of the subscribes and try again (see if can create publish
+        # instead - should not)
+        dut.droid.wifiAwareDestroyDiscoverySession(sub_disc_id)
+        self.start_discovery_session(
+            dut, session_id, True, aconsts.PUBLISH_TYPE_UNSOLICITED,
+            service_name_template % ('pub', 904), False)
+        self.start_discovery_session(
+            dut, session_id, False, aconsts.SUBSCRIBE_TYPE_ACTIVE,
+            service_name_template % ('pub', 905), True)
 
-  def test_max_ndp(self):
-    """Validate that the device can create as many NDPs as are specified
+    def test_max_ndp(self):
+        """Validate that the device can create as many NDPs as are specified
     by its capabilities.
 
     Mechanics:
@@ -151,113 +152,127 @@
     Note: the test requires MAX_NDP + 2 devices to be validated. If these are
     not available the test will fail.
     """
-    dut = self.android_devices[0]
+        dut = self.android_devices[0]
 
-    # get max NDP: using first available device (assumes all devices are the
-    # same)
-    max_ndp = dut.aware_capabilities[aconsts.CAP_MAX_NDP_SESSIONS]
+        # get max NDP: using first available device (assumes all devices are the
+        # same)
+        max_ndp = dut.aware_capabilities[aconsts.CAP_MAX_NDP_SESSIONS]
 
-    # get number of attached devices: needs to be max_ndp+2 to allow for max_ndp
-    # NDPs + an additional one expected to fail.
-    # However, will run the test with max_ndp+1 devices to verify that at least
-    # that many NDPs can be created. Will still fail at the end to indicate that
-    # full test was not run.
-    num_peer_devices = min(len(self.android_devices) - 1, max_ndp + 1)
-    asserts.assert_true(
-        num_peer_devices >= max_ndp,
-        'A minimum of %d devices is needed to run the test, have %d' %
-        (max_ndp + 1, len(self.android_devices)))
+        # get number of attached devices: needs to be max_ndp+2 to allow for max_ndp
+        # NDPs + an additional one expected to fail.
+        # However, will run the test with max_ndp+1 devices to verify that at least
+        # that many NDPs can be created. Will still fail at the end to indicate that
+        # full test was not run.
+        num_peer_devices = min(len(self.android_devices) - 1, max_ndp + 1)
+        asserts.assert_true(
+            num_peer_devices >= max_ndp,
+            'A minimum of %d devices is needed to run the test, have %d' %
+            (max_ndp + 1, len(self.android_devices)))
 
-    # attach
-    session_id = dut.droid.wifiAwareAttach()
-    autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
+        # attach
+        session_id = dut.droid.wifiAwareAttach()
+        autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
 
-    # start publisher
-    p_disc_id = self.start_discovery_session(
-        dut,
-        session_id,
-        is_publish=True,
-        dtype=aconsts.PUBLISH_TYPE_UNSOLICITED,
-        service_name=self.SERVICE_NAME,
-        expect_success=True)
+        # start publisher
+        p_disc_id = self.start_discovery_session(
+            dut,
+            session_id,
+            is_publish=True,
+            dtype=aconsts.PUBLISH_TYPE_UNSOLICITED,
+            service_name=self.SERVICE_NAME,
+            expect_success=True)
 
-    # loop over other DUTs
-    for i in range(num_peer_devices):
-      other_dut = self.android_devices[i + 1]
+        # loop over other DUTs
+        for i in range(num_peer_devices):
+            other_dut = self.android_devices[i + 1]
 
-      # attach
-      other_session_id = other_dut.droid.wifiAwareAttach()
-      autils.wait_for_event(other_dut, aconsts.EVENT_CB_ON_ATTACHED)
+            # attach
+            other_session_id = other_dut.droid.wifiAwareAttach()
+            autils.wait_for_event(other_dut, aconsts.EVENT_CB_ON_ATTACHED)
 
-      # start subscriber
-      s_disc_id = self.start_discovery_session(
-          other_dut,
-          other_session_id,
-          is_publish=False,
-          dtype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
-          service_name=self.SERVICE_NAME,
-          expect_success=True)
+            # start subscriber
+            s_disc_id = self.start_discovery_session(
+                other_dut,
+                other_session_id,
+                is_publish=False,
+                dtype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
+                service_name=self.SERVICE_NAME,
+                expect_success=True)
 
-      discovery_event = autils.wait_for_event(
-          other_dut, aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
-      peer_id_on_sub = discovery_event['data'][aconsts.SESSION_CB_KEY_PEER_ID]
+            discovery_event = autils.wait_for_event(
+                other_dut, aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
+            peer_id_on_sub = discovery_event['data'][
+                aconsts.SESSION_CB_KEY_PEER_ID]
 
-      # Subscriber: send message to peer (Publisher - so it knows our address)
-      other_dut.droid.wifiAwareSendMessage(
-          s_disc_id, peer_id_on_sub,
-          self.get_next_msg_id(), "ping", aconsts.MAX_TX_RETRIES)
-      autils.wait_for_event(other_dut, aconsts.SESSION_CB_ON_MESSAGE_SENT)
+            # Subscriber: send message to peer (Publisher - so it knows our address)
+            other_dut.droid.wifiAwareSendMessage(s_disc_id, peer_id_on_sub,
+                                                 self.get_next_msg_id(),
+                                                 "ping",
+                                                 aconsts.MAX_TX_RETRIES)
+            autils.wait_for_event(other_dut,
+                                  aconsts.SESSION_CB_ON_MESSAGE_SENT)
 
-      # Publisher: wait for received message
-      pub_rx_msg_event = autils.wait_for_event(
-          dut, aconsts.SESSION_CB_ON_MESSAGE_RECEIVED)
-      peer_id_on_pub = pub_rx_msg_event['data'][aconsts.SESSION_CB_KEY_PEER_ID]
+            # Publisher: wait for received message
+            pub_rx_msg_event = autils.wait_for_event(
+                dut, aconsts.SESSION_CB_ON_MESSAGE_RECEIVED)
+            peer_id_on_pub = pub_rx_msg_event['data'][
+                aconsts.SESSION_CB_KEY_PEER_ID]
 
-      # publisher (responder): request network
-      p_req_key = autils.request_network(
-          dut,
-          dut.droid.wifiAwareCreateNetworkSpecifier(p_disc_id, peer_id_on_pub))
+            # publisher (responder): request network
+            p_req_key = autils.request_network(
+                dut,
+                dut.droid.wifiAwareCreateNetworkSpecifier(
+                    p_disc_id, peer_id_on_pub))
 
-      # subscriber (initiator): request network
-      s_req_key = autils.request_network(
-          other_dut,
-          other_dut.droid.wifiAwareCreateNetworkSpecifier(
-              s_disc_id, peer_id_on_sub))
+            # subscriber (initiator): request network
+            s_req_key = autils.request_network(
+                other_dut,
+                other_dut.droid.wifiAwareCreateNetworkSpecifier(
+                    s_disc_id, peer_id_on_sub))
 
-      # wait for network (or not - on the last iteration)
-      if i != max_ndp:
-        p_net_event = autils.wait_for_event_with_keys(
-            dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
-            (cconsts.NETWORK_CB_KEY_EVENT,
-             cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-            (cconsts.NETWORK_CB_KEY_ID, p_req_key))
-        s_net_event = autils.wait_for_event_with_keys(
-            other_dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
-            (cconsts.NETWORK_CB_KEY_EVENT,
-             cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-            (cconsts.NETWORK_CB_KEY_ID, s_req_key))
+            # wait for network (or not - on the last iteration)
+            if i != max_ndp:
+                p_net_event = autils.wait_for_event_with_keys(
+                    dut, cconsts.EVENT_NETWORK_CALLBACK,
+                    autils.EVENT_NDP_TIMEOUT,
+                    (cconsts.NETWORK_CB_KEY_EVENT,
+                     cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+                    (cconsts.NETWORK_CB_KEY_ID, p_req_key))
+                s_net_event = autils.wait_for_event_with_keys(
+                    other_dut, cconsts.EVENT_NETWORK_CALLBACK,
+                    autils.EVENT_NDP_TIMEOUT,
+                    (cconsts.NETWORK_CB_KEY_EVENT,
+                     cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+                    (cconsts.NETWORK_CB_KEY_ID, s_req_key))
 
-        p_aware_if = p_net_event['data'][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
-        s_aware_if = s_net_event['data'][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
-        self.log.info('Interface names: p=%s, s=%s', p_aware_if, s_aware_if)
+                p_aware_if = p_net_event['data'][
+                    cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+                s_aware_if = s_net_event['data'][
+                    cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+                self.log.info('Interface names: p=%s, s=%s', p_aware_if,
+                              s_aware_if)
 
-        p_ipv6 = dut.droid.connectivityGetLinkLocalIpv6Address(
-            p_aware_if).split('%')[0]
-        s_ipv6 = other_dut.droid.connectivityGetLinkLocalIpv6Address(
-            s_aware_if).split('%')[0]
-        self.log.info('Interface addresses (IPv6): p=%s, s=%s', p_ipv6, s_ipv6)
-      else:
-        autils.fail_on_event_with_keys(
-            dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
-            (cconsts.NETWORK_CB_KEY_EVENT,
-             cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-            (cconsts.NETWORK_CB_KEY_ID, p_req_key))
-        autils.fail_on_event_with_keys(
-            other_dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
-            (cconsts.NETWORK_CB_KEY_EVENT,
-             cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-            (cconsts.NETWORK_CB_KEY_ID, s_req_key))
+                p_ipv6 = dut.droid.connectivityGetLinkLocalIpv6Address(
+                    p_aware_if).split('%')[0]
+                s_ipv6 = other_dut.droid.connectivityGetLinkLocalIpv6Address(
+                    s_aware_if).split('%')[0]
+                self.log.info('Interface addresses (IPv6): p=%s, s=%s', p_ipv6,
+                              s_ipv6)
+            else:
+                autils.fail_on_event_with_keys(
+                    dut, cconsts.EVENT_NETWORK_CALLBACK,
+                    autils.EVENT_NDP_TIMEOUT,
+                    (cconsts.NETWORK_CB_KEY_EVENT,
+                     cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+                    (cconsts.NETWORK_CB_KEY_ID, p_req_key))
+                autils.fail_on_event_with_keys(
+                    other_dut, cconsts.EVENT_NETWORK_CALLBACK,
+                    autils.EVENT_NDP_TIMEOUT,
+                    (cconsts.NETWORK_CB_KEY_EVENT,
+                     cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+                    (cconsts.NETWORK_CB_KEY_ID, s_req_key))
 
-    asserts.assert_true(num_peer_devices > max_ndp,
-                        'Needed %d devices to run the test, have %d' %
-                        (max_ndp + 2, len(self.android_devices)))
+        asserts.assert_true(
+            num_peer_devices > max_ndp,
+            'Needed %d devices to run the test, have %d' %
+            (max_ndp + 2, len(self.android_devices)))
diff --git a/acts/tests/google/wifi/aware/functional/DataPathTest.py b/acts/tests/google/wifi/aware/functional/DataPathTest.py
index 8afb2a4..cc81433 100644
--- a/acts/tests/google/wifi/aware/functional/DataPathTest.py
+++ b/acts/tests/google/wifi/aware/functional/DataPathTest.py
@@ -26,36 +26,36 @@
 
 
 class DataPathTest(AwareBaseTest):
-  """Set of tests for Wi-Fi Aware data-path."""
+    """Set of tests for Wi-Fi Aware data-path."""
 
-  # configuration parameters used by tests
-  ENCR_TYPE_OPEN = 0
-  ENCR_TYPE_PASSPHRASE = 1
-  ENCR_TYPE_PMK = 2
+    # configuration parameters used by tests
+    ENCR_TYPE_OPEN = 0
+    ENCR_TYPE_PASSPHRASE = 1
+    ENCR_TYPE_PMK = 2
 
-  PASSPHRASE = "This is some random passphrase - very very secure!!"
-  PASSPHRASE_MIN = "01234567"
-  PASSPHRASE_MAX = "012345678901234567890123456789012345678901234567890123456789012"
-  PMK = "ODU0YjE3YzdmNDJiNWI4NTQ2NDJjNDI3M2VkZTQyZGU="
-  PASSPHRASE2 = "This is some random passphrase - very very secure - but diff!!"
-  PMK2 = "MTIzNDU2Nzg5MDEyMzQ1Njc4OTAxMjM0NTY3ODkwMTI="
+    PASSPHRASE = "This is some random passphrase - very very secure!!"
+    PASSPHRASE_MIN = "01234567"
+    PASSPHRASE_MAX = "012345678901234567890123456789012345678901234567890123456789012"
+    PMK = "ODU0YjE3YzdmNDJiNWI4NTQ2NDJjNDI3M2VkZTQyZGU="
+    PASSPHRASE2 = "This is some random passphrase - very very secure - but diff!!"
+    PMK2 = "MTIzNDU2Nzg5MDEyMzQ1Njc4OTAxMjM0NTY3ODkwMTI="
 
-  PING_MSG = "ping"
+    PING_MSG = "ping"
 
-  # message re-transmit counter (increases reliability in open-environment)
-  # Note: reliability of message transmission is tested elsewhere
-  MSG_RETX_COUNT = 5  # hard-coded max value, internal API
+    # message re-transmit counter (increases reliability in open-environment)
+    # Note: reliability of message transmission is tested elsewhere
+    MSG_RETX_COUNT = 5  # hard-coded max value, internal API
 
-  # number of second to 'reasonably' wait to make sure that devices synchronize
-  # with each other - useful for OOB test cases, where the OOB discovery would
-  # take some time
-  WAIT_FOR_CLUSTER = 5
+    # number of second to 'reasonably' wait to make sure that devices synchronize
+    # with each other - useful for OOB test cases, where the OOB discovery would
+    # take some time
+    WAIT_FOR_CLUSTER = 5
 
-  def __init__(self, controllers):
-    AwareBaseTest.__init__(self, controllers)
+    def __init__(self, controllers):
+        AwareBaseTest.__init__(self, controllers)
 
-  def create_config(self, dtype):
-    """Create a base configuration based on input parameters.
+    def create_config(self, dtype):
+        """Create a base configuration based on input parameters.
 
     Args:
       dtype: Publish or Subscribe discovery type
@@ -63,25 +63,30 @@
     Returns:
       Discovery configuration object.
     """
-    config = {}
-    config[aconsts.DISCOVERY_KEY_DISCOVERY_TYPE] = dtype
-    config[aconsts.DISCOVERY_KEY_SERVICE_NAME] = "GoogleTestServiceDataPath"
-    return config
+        config = {}
+        config[aconsts.DISCOVERY_KEY_DISCOVERY_TYPE] = dtype
+        config[
+            aconsts.DISCOVERY_KEY_SERVICE_NAME] = "GoogleTestServiceDataPath"
+        return config
 
-  def request_network(self, dut, ns):
-    """Request a Wi-Fi Aware network.
+    def request_network(self, dut, ns):
+        """Request a Wi-Fi Aware network.
 
     Args:
       dut: Device
       ns: Network specifier
     Returns: the request key
     """
-    network_req = {"TransportType": 5, "NetworkSpecifier": ns}
-    return dut.droid.connectivityRequestWifiAwareNetwork(network_req)
+        network_req = {"TransportType": 5, "NetworkSpecifier": ns}
+        return dut.droid.connectivityRequestWifiAwareNetwork(network_req)
 
-  def set_up_discovery(self, ptype, stype, get_peer_id, pub_on_both=False,
-      pub_on_both_same=True):
-    """Set up discovery sessions and wait for service discovery.
+    def set_up_discovery(self,
+                         ptype,
+                         stype,
+                         get_peer_id,
+                         pub_on_both=False,
+                         pub_on_both_same=True):
+        """Set up discovery sessions and wait for service discovery.
 
     Args:
       ptype: Publish discovery type
@@ -92,66 +97,71 @@
       pub_on_both_same: If True then the second publish uses an identical
                         service name, otherwise a different service name.
     """
-    p_dut = self.android_devices[0]
-    p_dut.pretty_name = "Publisher"
-    s_dut = self.android_devices[1]
-    s_dut.pretty_name = "Subscriber"
+        p_dut = self.android_devices[0]
+        p_dut.pretty_name = "Publisher"
+        s_dut = self.android_devices[1]
+        s_dut.pretty_name = "Subscriber"
 
-    # Publisher+Subscriber: attach and wait for confirmation
-    p_id = p_dut.droid.wifiAwareAttach()
-    autils.wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
-    time.sleep(self.device_startup_offset)
-    s_id = s_dut.droid.wifiAwareAttach()
-    autils.wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
+        # Publisher+Subscriber: attach and wait for confirmation
+        p_id = p_dut.droid.wifiAwareAttach()
+        autils.wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
+        time.sleep(self.device_startup_offset)
+        s_id = s_dut.droid.wifiAwareAttach()
+        autils.wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
 
-    # Publisher: start publish and wait for confirmation
-    p_disc_id = p_dut.droid.wifiAwarePublish(p_id, self.create_config(ptype))
-    autils.wait_for_event(p_dut, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
+        # Publisher: start publish and wait for confirmation
+        p_disc_id = p_dut.droid.wifiAwarePublish(p_id,
+                                                 self.create_config(ptype))
+        autils.wait_for_event(p_dut, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
 
-    # Optionally set up a publish session on the Subscriber device
-    if pub_on_both:
-      p2_config = self.create_config(ptype)
-      if not pub_on_both_same:
-        p2_config[aconsts.DISCOVERY_KEY_SERVICE_NAME] = (
-          p2_config[aconsts.DISCOVERY_KEY_SERVICE_NAME] + "-XYZXYZ")
-      s_dut.droid.wifiAwarePublish(s_id, p2_config)
-      autils.wait_for_event(s_dut, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
+        # Optionally set up a publish session on the Subscriber device
+        if pub_on_both:
+            p2_config = self.create_config(ptype)
+            if not pub_on_both_same:
+                p2_config[aconsts.DISCOVERY_KEY_SERVICE_NAME] = (
+                    p2_config[aconsts.DISCOVERY_KEY_SERVICE_NAME] + "-XYZXYZ")
+            s_dut.droid.wifiAwarePublish(s_id, p2_config)
+            autils.wait_for_event(s_dut, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
 
-    # Subscriber: start subscribe and wait for confirmation
-    s_disc_id = s_dut.droid.wifiAwareSubscribe(s_id, self.create_config(stype))
-    autils.wait_for_event(s_dut, aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED)
+        # Subscriber: start subscribe and wait for confirmation
+        s_disc_id = s_dut.droid.wifiAwareSubscribe(s_id,
+                                                   self.create_config(stype))
+        autils.wait_for_event(s_dut, aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED)
 
-    # Subscriber: wait for service discovery
-    discovery_event = autils.wait_for_event(
-        s_dut, aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
-    peer_id_on_sub = discovery_event["data"][aconsts.SESSION_CB_KEY_PEER_ID]
+        # Subscriber: wait for service discovery
+        discovery_event = autils.wait_for_event(
+            s_dut, aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
+        peer_id_on_sub = discovery_event["data"][
+            aconsts.SESSION_CB_KEY_PEER_ID]
 
-    peer_id_on_pub = None
-    if get_peer_id: # only need message to receive peer ID
-      # Subscriber: send message to peer (Publisher - so it knows our address)
-      s_dut.droid.wifiAwareSendMessage(s_disc_id, peer_id_on_sub,
-                                       self.get_next_msg_id(), self.PING_MSG,
-                                       self.MSG_RETX_COUNT)
-      autils.wait_for_event(s_dut, aconsts.SESSION_CB_ON_MESSAGE_SENT)
+        peer_id_on_pub = None
+        if get_peer_id:  # only need message to receive peer ID
+            # Subscriber: send message to peer (Publisher - so it knows our address)
+            s_dut.droid.wifiAwareSendMessage(s_disc_id, peer_id_on_sub,
+                                             self.get_next_msg_id(),
+                                             self.PING_MSG,
+                                             self.MSG_RETX_COUNT)
+            autils.wait_for_event(s_dut, aconsts.SESSION_CB_ON_MESSAGE_SENT)
 
-      # Publisher: wait for received message
-      pub_rx_msg_event = autils.wait_for_event(
-          p_dut, aconsts.SESSION_CB_ON_MESSAGE_RECEIVED)
-      peer_id_on_pub = pub_rx_msg_event["data"][aconsts.SESSION_CB_KEY_PEER_ID]
+            # Publisher: wait for received message
+            pub_rx_msg_event = autils.wait_for_event(
+                p_dut, aconsts.SESSION_CB_ON_MESSAGE_RECEIVED)
+            peer_id_on_pub = pub_rx_msg_event["data"][
+                aconsts.SESSION_CB_KEY_PEER_ID]
 
-    return (p_dut, s_dut, p_id, s_id, p_disc_id, s_disc_id, peer_id_on_sub,
-            peer_id_on_pub)
+        return (p_dut, s_dut, p_id, s_id, p_disc_id, s_disc_id, peer_id_on_sub,
+                peer_id_on_pub)
 
-  def run_ib_data_path_test(self,
-      ptype,
-      stype,
-      encr_type,
-      use_peer_id,
-      passphrase_to_use=None,
-      pub_on_both=False,
-      pub_on_both_same=True,
-      expect_failure=False):
-    """Runs the in-band data-path tests.
+    def run_ib_data_path_test(self,
+                              ptype,
+                              stype,
+                              encr_type,
+                              use_peer_id,
+                              passphrase_to_use=None,
+                              pub_on_both=False,
+                              pub_on_both_same=True,
+                              expect_failure=False):
+        """Runs the in-band data-path tests.
 
     Args:
       ptype: Publish discovery type
@@ -168,85 +178,100 @@
       expect_failure: If True then don't expect NDP formation, otherwise expect
                       NDP setup to succeed.
     """
-    (p_dut, s_dut, p_id, s_id, p_disc_id, s_disc_id, peer_id_on_sub,
-     peer_id_on_pub) = self.set_up_discovery(ptype, stype, use_peer_id,
-                                             pub_on_both=pub_on_both,
-                                             pub_on_both_same=pub_on_both_same)
+        (p_dut, s_dut, p_id, s_id, p_disc_id, s_disc_id, peer_id_on_sub,
+         peer_id_on_pub) = self.set_up_discovery(
+             ptype,
+             stype,
+             use_peer_id,
+             pub_on_both=pub_on_both,
+             pub_on_both_same=pub_on_both_same)
 
-    passphrase = None
-    pmk = None
-    if encr_type == self.ENCR_TYPE_PASSPHRASE:
-      passphrase = (
-        self.PASSPHRASE if passphrase_to_use == None else passphrase_to_use)
-    elif encr_type == self.ENCR_TYPE_PMK:
-      pmk = self.PMK
+        passphrase = None
+        pmk = None
+        if encr_type == self.ENCR_TYPE_PASSPHRASE:
+            passphrase = (self.PASSPHRASE
+                          if passphrase_to_use == None else passphrase_to_use)
+        elif encr_type == self.ENCR_TYPE_PMK:
+            pmk = self.PMK
 
-    # Publisher: request network
-    p_req_key = self.request_network(
-        p_dut,
-        p_dut.droid.wifiAwareCreateNetworkSpecifier(p_disc_id, peer_id_on_pub if
-        use_peer_id else None, passphrase, pmk))
+        # Publisher: request network
+        p_req_key = self.request_network(
+            p_dut,
+            p_dut.droid.wifiAwareCreateNetworkSpecifier(
+                p_disc_id, peer_id_on_pub
+                if use_peer_id else None, passphrase, pmk))
 
-    # Subscriber: request network
-    s_req_key = self.request_network(
-        s_dut,
-        s_dut.droid.wifiAwareCreateNetworkSpecifier(s_disc_id, peer_id_on_sub,
-                                                    passphrase, pmk))
+        # Subscriber: request network
+        s_req_key = self.request_network(
+            s_dut,
+            s_dut.droid.wifiAwareCreateNetworkSpecifier(
+                s_disc_id, peer_id_on_sub, passphrase, pmk))
 
-    if expect_failure:
-      # Publisher & Subscriber: fail on network formation
-      time.sleep(autils.EVENT_NDP_TIMEOUT)
-      autils.fail_on_event_with_keys(p_dut, cconsts.EVENT_NETWORK_CALLBACK, 0,
-                                     (cconsts.NETWORK_CB_KEY_ID, p_req_key))
-      autils.fail_on_event_with_keys(s_dut, cconsts.EVENT_NETWORK_CALLBACK, 0,
-                                     (cconsts.NETWORK_CB_KEY_ID, s_req_key))
-    else:
-      # Publisher & Subscriber: wait for network formation
-      p_net_event = autils.wait_for_event_with_keys(
-          p_dut, cconsts.EVENT_NETWORK_CALLBACK,
-          autils.EVENT_NDP_TIMEOUT,
-          (cconsts.NETWORK_CB_KEY_EVENT,
-           cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-          (cconsts.NETWORK_CB_KEY_ID, p_req_key))
-      s_net_event = autils.wait_for_event_with_keys(
-          s_dut, cconsts.EVENT_NETWORK_CALLBACK,
-          autils.EVENT_NDP_TIMEOUT,
-          (cconsts.NETWORK_CB_KEY_EVENT,
-           cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-          (cconsts.NETWORK_CB_KEY_ID, s_req_key))
+        if expect_failure:
+            # Publisher & Subscriber: fail on network formation
+            time.sleep(autils.EVENT_NDP_TIMEOUT)
+            autils.fail_on_event_with_keys(
+                p_dut, cconsts.EVENT_NETWORK_CALLBACK, 0,
+                (cconsts.NETWORK_CB_KEY_ID, p_req_key))
+            autils.fail_on_event_with_keys(
+                s_dut, cconsts.EVENT_NETWORK_CALLBACK, 0,
+                (cconsts.NETWORK_CB_KEY_ID, s_req_key))
+        else:
+            # Publisher & Subscriber: wait for network formation
+            p_net_event = autils.wait_for_event_with_keys(
+                p_dut, cconsts.EVENT_NETWORK_CALLBACK,
+                autils.EVENT_NDP_TIMEOUT,
+                (cconsts.NETWORK_CB_KEY_EVENT,
+                 cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+                (cconsts.NETWORK_CB_KEY_ID, p_req_key))
+            s_net_event = autils.wait_for_event_with_keys(
+                s_dut, cconsts.EVENT_NETWORK_CALLBACK,
+                autils.EVENT_NDP_TIMEOUT,
+                (cconsts.NETWORK_CB_KEY_EVENT,
+                 cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+                (cconsts.NETWORK_CB_KEY_ID, s_req_key))
 
-      p_aware_if = p_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
-      s_aware_if = s_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
-      self.log.info("Interface names: p=%s, s=%s", p_aware_if, s_aware_if)
+            p_aware_if = p_net_event["data"][
+                cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+            s_aware_if = s_net_event["data"][
+                cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+            self.log.info("Interface names: p=%s, s=%s", p_aware_if,
+                          s_aware_if)
 
-      p_ipv6 = \
-      p_dut.droid.connectivityGetLinkLocalIpv6Address(p_aware_if).split("%")[0]
-      s_ipv6 = \
-      s_dut.droid.connectivityGetLinkLocalIpv6Address(s_aware_if).split("%")[0]
-      self.log.info("Interface addresses (IPv6): p=%s, s=%s", p_ipv6, s_ipv6)
+            p_ipv6 = \
+            p_dut.droid.connectivityGetLinkLocalIpv6Address(p_aware_if).split("%")[0]
+            s_ipv6 = \
+            s_dut.droid.connectivityGetLinkLocalIpv6Address(s_aware_if).split("%")[0]
+            self.log.info("Interface addresses (IPv6): p=%s, s=%s", p_ipv6,
+                          s_ipv6)
 
-      # TODO: possibly send messages back and forth, prefer to use netcat/nc
+            # TODO: possibly send messages back and forth, prefer to use netcat/nc
 
-      # terminate sessions and wait for ON_LOST callbacks
-      p_dut.droid.wifiAwareDestroy(p_id)
-      s_dut.droid.wifiAwareDestroy(s_id)
+            # terminate sessions and wait for ON_LOST callbacks
+            p_dut.droid.wifiAwareDestroy(p_id)
+            s_dut.droid.wifiAwareDestroy(s_id)
 
-      autils.wait_for_event_with_keys(
-          p_dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
-          (cconsts.NETWORK_CB_KEY_EVENT,
-           cconsts.NETWORK_CB_LOST), (cconsts.NETWORK_CB_KEY_ID, p_req_key))
-      autils.wait_for_event_with_keys(
-          s_dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
-          (cconsts.NETWORK_CB_KEY_EVENT,
-           cconsts.NETWORK_CB_LOST), (cconsts.NETWORK_CB_KEY_ID, s_req_key))
+            autils.wait_for_event_with_keys(
+                p_dut, cconsts.EVENT_NETWORK_CALLBACK,
+                autils.EVENT_NDP_TIMEOUT,
+                (cconsts.NETWORK_CB_KEY_EVENT, cconsts.NETWORK_CB_LOST),
+                (cconsts.NETWORK_CB_KEY_ID, p_req_key))
+            autils.wait_for_event_with_keys(
+                s_dut, cconsts.EVENT_NETWORK_CALLBACK,
+                autils.EVENT_NDP_TIMEOUT,
+                (cconsts.NETWORK_CB_KEY_EVENT, cconsts.NETWORK_CB_LOST),
+                (cconsts.NETWORK_CB_KEY_ID, s_req_key))
 
-    # clean-up
-    p_dut.droid.connectivityUnregisterNetworkCallback(p_req_key)
-    s_dut.droid.connectivityUnregisterNetworkCallback(s_req_key)
+        # clean-up
+        p_dut.droid.connectivityUnregisterNetworkCallback(p_req_key)
+        s_dut.droid.connectivityUnregisterNetworkCallback(s_req_key)
 
-  def run_oob_data_path_test(self, encr_type, use_peer_id,
-      setup_discovery_sessions=False, expect_failure=False):
-    """Runs the out-of-band data-path tests.
+    def run_oob_data_path_test(self,
+                               encr_type,
+                               use_peer_id,
+                               setup_discovery_sessions=False,
+                               expect_failure=False):
+        """Runs the out-of-band data-path tests.
 
     Args:
       encr_type: Encryption type, one of ENCR_TYPE_*
@@ -258,162 +283,173 @@
       expect_failure: If True then don't expect NDP formation, otherwise expect
                       NDP setup to succeed.
     """
-    init_dut = self.android_devices[0]
-    init_dut.pretty_name = "Initiator"
-    resp_dut = self.android_devices[1]
-    resp_dut.pretty_name = "Responder"
+        init_dut = self.android_devices[0]
+        init_dut.pretty_name = "Initiator"
+        resp_dut = self.android_devices[1]
+        resp_dut.pretty_name = "Responder"
 
-    # Initiator+Responder: attach and wait for confirmation & identity
-    init_id = init_dut.droid.wifiAwareAttach(True)
-    autils.wait_for_event(init_dut, aconsts.EVENT_CB_ON_ATTACHED)
-    init_ident_event = autils.wait_for_event(
-        init_dut, aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
-    init_mac = init_ident_event["data"]["mac"]
-    time.sleep(self.device_startup_offset)
-    resp_id = resp_dut.droid.wifiAwareAttach(True)
-    autils.wait_for_event(resp_dut, aconsts.EVENT_CB_ON_ATTACHED)
-    resp_ident_event = autils.wait_for_event(
-        resp_dut, aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
-    resp_mac = resp_ident_event["data"]["mac"]
+        # Initiator+Responder: attach and wait for confirmation & identity
+        init_id = init_dut.droid.wifiAwareAttach(True)
+        autils.wait_for_event(init_dut, aconsts.EVENT_CB_ON_ATTACHED)
+        init_ident_event = autils.wait_for_event(
+            init_dut, aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
+        init_mac = init_ident_event["data"]["mac"]
+        time.sleep(self.device_startup_offset)
+        resp_id = resp_dut.droid.wifiAwareAttach(True)
+        autils.wait_for_event(resp_dut, aconsts.EVENT_CB_ON_ATTACHED)
+        resp_ident_event = autils.wait_for_event(
+            resp_dut, aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
+        resp_mac = resp_ident_event["data"]["mac"]
 
-    # wait for for devices to synchronize with each other - there are no other
-    # mechanisms to make sure this happens for OOB discovery (except retrying
-    # to execute the data-path request)
-    time.sleep(self.WAIT_FOR_CLUSTER)
+        # wait for devices to synchronize with each other - there are no other
+        # mechanisms to make sure this happens for OOB discovery (except retrying
+        # to execute the data-path request)
+        time.sleep(self.WAIT_FOR_CLUSTER)
 
-    if setup_discovery_sessions:
-      init_dut.droid.wifiAwarePublish(init_id, self.create_config(
-        aconsts.PUBLISH_TYPE_UNSOLICITED))
-      autils.wait_for_event(init_dut, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
-      resp_dut.droid.wifiAwarePublish(resp_id, self.create_config(
-          aconsts.PUBLISH_TYPE_UNSOLICITED))
-      autils.wait_for_event(resp_dut, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
-      resp_dut.droid.wifiAwareSubscribe(resp_id, self.create_config(
-          aconsts.SUBSCRIBE_TYPE_PASSIVE))
-      autils.wait_for_event(resp_dut, aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED)
-      autils.wait_for_event(resp_dut, aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
+        if setup_discovery_sessions:
+            init_dut.droid.wifiAwarePublish(
+                init_id, self.create_config(aconsts.PUBLISH_TYPE_UNSOLICITED))
+            autils.wait_for_event(init_dut,
+                                  aconsts.SESSION_CB_ON_PUBLISH_STARTED)
+            resp_dut.droid.wifiAwarePublish(
+                resp_id, self.create_config(aconsts.PUBLISH_TYPE_UNSOLICITED))
+            autils.wait_for_event(resp_dut,
+                                  aconsts.SESSION_CB_ON_PUBLISH_STARTED)
+            resp_dut.droid.wifiAwareSubscribe(
+                resp_id, self.create_config(aconsts.SUBSCRIBE_TYPE_PASSIVE))
+            autils.wait_for_event(resp_dut,
+                                  aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED)
+            autils.wait_for_event(resp_dut,
+                                  aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
 
-    passphrase = None
-    pmk = None
-    if encr_type == self.ENCR_TYPE_PASSPHRASE:
-      passphrase = self.PASSPHRASE
-    elif encr_type == self.ENCR_TYPE_PMK:
-      pmk = self.PMK
+        passphrase = None
+        pmk = None
+        if encr_type == self.ENCR_TYPE_PASSPHRASE:
+            passphrase = self.PASSPHRASE
+        elif encr_type == self.ENCR_TYPE_PMK:
+            pmk = self.PMK
 
-    # Responder: request network
-    resp_req_key = self.request_network(
-        resp_dut,
-        resp_dut.droid.wifiAwareCreateNetworkSpecifierOob(
-            resp_id, aconsts.DATA_PATH_RESPONDER, init_mac
-            if use_peer_id else None, passphrase, pmk))
+        # Responder: request network
+        resp_req_key = self.request_network(
+            resp_dut,
+            resp_dut.droid.wifiAwareCreateNetworkSpecifierOob(
+                resp_id, aconsts.DATA_PATH_RESPONDER, init_mac
+                if use_peer_id else None, passphrase, pmk))
 
-    # Initiator: request network
-    init_req_key = self.request_network(
-        init_dut,
-        init_dut.droid.wifiAwareCreateNetworkSpecifierOob(
-            init_id, aconsts.DATA_PATH_INITIATOR, resp_mac, passphrase, pmk))
+        # Initiator: request network
+        init_req_key = self.request_network(
+            init_dut,
+            init_dut.droid.wifiAwareCreateNetworkSpecifierOob(
+                init_id, aconsts.DATA_PATH_INITIATOR, resp_mac, passphrase,
+                pmk))
 
-    if expect_failure:
-      # Initiator & Responder: fail on network formation
-      time.sleep(autils.EVENT_NDP_TIMEOUT)
-      autils.fail_on_event_with_keys(resp_dut, cconsts.EVENT_NETWORK_CALLBACK,
-                                     0,
-                                     (cconsts.NETWORK_CB_KEY_ID, resp_req_key))
-      autils.fail_on_event_with_keys(init_dut, cconsts.EVENT_NETWORK_CALLBACK,
-                                     0,
-                                     (cconsts.NETWORK_CB_KEY_ID, init_req_key))
-    else:
-      # Initiator & Responder: wait for network formation
-      init_net_event = autils.wait_for_event_with_keys(
-          init_dut, cconsts.EVENT_NETWORK_CALLBACK,
-          autils.EVENT_NDP_TIMEOUT,
-          (cconsts.NETWORK_CB_KEY_EVENT,
-           cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-          (cconsts.NETWORK_CB_KEY_ID, init_req_key))
-      resp_net_event = autils.wait_for_event_with_keys(
-          resp_dut, cconsts.EVENT_NETWORK_CALLBACK,
-          autils.EVENT_NDP_TIMEOUT,
-          (cconsts.NETWORK_CB_KEY_EVENT,
-           cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-          (cconsts.NETWORK_CB_KEY_ID, resp_req_key))
+        if expect_failure:
+            # Initiator & Responder: fail on network formation
+            time.sleep(autils.EVENT_NDP_TIMEOUT)
+            autils.fail_on_event_with_keys(
+                resp_dut, cconsts.EVENT_NETWORK_CALLBACK, 0,
+                (cconsts.NETWORK_CB_KEY_ID, resp_req_key))
+            autils.fail_on_event_with_keys(
+                init_dut, cconsts.EVENT_NETWORK_CALLBACK, 0,
+                (cconsts.NETWORK_CB_KEY_ID, init_req_key))
+        else:
+            # Initiator & Responder: wait for network formation
+            init_net_event = autils.wait_for_event_with_keys(
+                init_dut, cconsts.EVENT_NETWORK_CALLBACK,
+                autils.EVENT_NDP_TIMEOUT,
+                (cconsts.NETWORK_CB_KEY_EVENT,
+                 cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+                (cconsts.NETWORK_CB_KEY_ID, init_req_key))
+            resp_net_event = autils.wait_for_event_with_keys(
+                resp_dut, cconsts.EVENT_NETWORK_CALLBACK,
+                autils.EVENT_NDP_TIMEOUT,
+                (cconsts.NETWORK_CB_KEY_EVENT,
+                 cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+                (cconsts.NETWORK_CB_KEY_ID, resp_req_key))
 
-      init_aware_if = init_net_event["data"][
-        cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
-      resp_aware_if = resp_net_event["data"][
-        cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
-      self.log.info("Interface names: I=%s, R=%s", init_aware_if, resp_aware_if)
+            init_aware_if = init_net_event["data"][
+                cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+            resp_aware_if = resp_net_event["data"][
+                cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+            self.log.info("Interface names: I=%s, R=%s", init_aware_if,
+                          resp_aware_if)
 
-      init_ipv6 = init_dut.droid.connectivityGetLinkLocalIpv6Address(
-          init_aware_if).split("%")[0]
-      resp_ipv6 = resp_dut.droid.connectivityGetLinkLocalIpv6Address(
-          resp_aware_if).split("%")[0]
-      self.log.info("Interface addresses (IPv6): I=%s, R=%s", init_ipv6,
-                    resp_ipv6)
+            init_ipv6 = init_dut.droid.connectivityGetLinkLocalIpv6Address(
+                init_aware_if).split("%")[0]
+            resp_ipv6 = resp_dut.droid.connectivityGetLinkLocalIpv6Address(
+                resp_aware_if).split("%")[0]
+            self.log.info("Interface addresses (IPv6): I=%s, R=%s", init_ipv6,
+                          resp_ipv6)
 
-      # TODO: possibly send messages back and forth, prefer to use netcat/nc
+            # TODO: possibly send messages back and forth, prefer to use netcat/nc
 
-      # terminate sessions and wait for ON_LOST callbacks
-      init_dut.droid.wifiAwareDestroy(init_id)
-      resp_dut.droid.wifiAwareDestroy(resp_id)
+            # terminate sessions and wait for ON_LOST callbacks
+            init_dut.droid.wifiAwareDestroy(init_id)
+            resp_dut.droid.wifiAwareDestroy(resp_id)
 
-      autils.wait_for_event_with_keys(
-          init_dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
-          (cconsts.NETWORK_CB_KEY_EVENT,
-           cconsts.NETWORK_CB_LOST), (cconsts.NETWORK_CB_KEY_ID, init_req_key))
-      autils.wait_for_event_with_keys(
-          resp_dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
-          (cconsts.NETWORK_CB_KEY_EVENT,
-           cconsts.NETWORK_CB_LOST), (cconsts.NETWORK_CB_KEY_ID, resp_req_key))
+            autils.wait_for_event_with_keys(
+                init_dut, cconsts.EVENT_NETWORK_CALLBACK,
+                autils.EVENT_NDP_TIMEOUT,
+                (cconsts.NETWORK_CB_KEY_EVENT, cconsts.NETWORK_CB_LOST),
+                (cconsts.NETWORK_CB_KEY_ID, init_req_key))
+            autils.wait_for_event_with_keys(
+                resp_dut, cconsts.EVENT_NETWORK_CALLBACK,
+                autils.EVENT_NDP_TIMEOUT,
+                (cconsts.NETWORK_CB_KEY_EVENT, cconsts.NETWORK_CB_LOST),
+                (cconsts.NETWORK_CB_KEY_ID, resp_req_key))
 
-    # clean-up
-    resp_dut.droid.connectivityUnregisterNetworkCallback(resp_req_key)
-    init_dut.droid.connectivityUnregisterNetworkCallback(init_req_key)
+        # clean-up
+        resp_dut.droid.connectivityUnregisterNetworkCallback(resp_req_key)
+        init_dut.droid.connectivityUnregisterNetworkCallback(init_req_key)
 
-  def run_mismatched_ib_data_path_test(self, pub_mismatch, sub_mismatch):
-    """Runs the negative in-band data-path tests: mismatched peer ID.
+    def run_mismatched_ib_data_path_test(self, pub_mismatch, sub_mismatch):
+        """Runs the negative in-band data-path tests: mismatched peer ID.
 
     Args:
       pub_mismatch: Mismatch the publisher's ID
       sub_mismatch: Mismatch the subscriber's ID
     """
-    (p_dut, s_dut, p_id, s_id, p_disc_id, s_disc_id,
-     peer_id_on_sub, peer_id_on_pub) = self.set_up_discovery(
-         aconsts.PUBLISH_TYPE_UNSOLICITED, aconsts.SUBSCRIBE_TYPE_PASSIVE, True)
+        (p_dut, s_dut, p_id, s_id, p_disc_id, s_disc_id, peer_id_on_sub,
+         peer_id_on_pub) = self.set_up_discovery(
+             aconsts.PUBLISH_TYPE_UNSOLICITED, aconsts.SUBSCRIBE_TYPE_PASSIVE,
+             True)
 
-    if pub_mismatch:
-      peer_id_on_pub = peer_id_on_pub -1
-    if sub_mismatch:
-      peer_id_on_sub = peer_id_on_sub - 1
+        if pub_mismatch:
+            peer_id_on_pub = peer_id_on_pub - 1
+        if sub_mismatch:
+            peer_id_on_sub = peer_id_on_sub - 1
 
-    # Publisher: request network
-    p_req_key = self.request_network(
-        p_dut,
-        p_dut.droid.wifiAwareCreateNetworkSpecifier(p_disc_id, peer_id_on_pub,
-                                                    None))
+        # Publisher: request network
+        p_req_key = self.request_network(
+            p_dut,
+            p_dut.droid.wifiAwareCreateNetworkSpecifier(
+                p_disc_id, peer_id_on_pub, None))
 
-    # Subscriber: request network
-    s_req_key = self.request_network(
-        s_dut,
-        s_dut.droid.wifiAwareCreateNetworkSpecifier(s_disc_id, peer_id_on_sub,
-                                                    None))
+        # Subscriber: request network
+        s_req_key = self.request_network(
+            s_dut,
+            s_dut.droid.wifiAwareCreateNetworkSpecifier(
+                s_disc_id, peer_id_on_sub, None))
 
-    # Publisher & Subscriber: fail on network formation
-    time.sleep(autils.EVENT_NDP_TIMEOUT)
-    autils.fail_on_event_with_keys(p_dut, cconsts.EVENT_NETWORK_CALLBACK, 0,
-                                   (cconsts.NETWORK_CB_KEY_ID, p_req_key))
-    autils.fail_on_event_with_keys(s_dut, cconsts.EVENT_NETWORK_CALLBACK, 0,
-                                   (cconsts.NETWORK_CB_KEY_ID, s_req_key))
+        # Publisher & Subscriber: fail on network formation
+        time.sleep(autils.EVENT_NDP_TIMEOUT)
+        autils.fail_on_event_with_keys(p_dut, cconsts.EVENT_NETWORK_CALLBACK,
+                                       0,
+                                       (cconsts.NETWORK_CB_KEY_ID, p_req_key))
+        autils.fail_on_event_with_keys(s_dut, cconsts.EVENT_NETWORK_CALLBACK,
+                                       0,
+                                       (cconsts.NETWORK_CB_KEY_ID, s_req_key))
 
-    # clean-up
-    p_dut.droid.connectivityUnregisterNetworkCallback(p_req_key)
-    s_dut.droid.connectivityUnregisterNetworkCallback(s_req_key)
+        # clean-up
+        p_dut.droid.connectivityUnregisterNetworkCallback(p_req_key)
+        s_dut.droid.connectivityUnregisterNetworkCallback(s_req_key)
 
-  def run_mismatched_oob_data_path_test(self,
-      init_mismatch_mac=False,
-      resp_mismatch_mac=False,
-      init_encr_type=ENCR_TYPE_OPEN,
-      resp_encr_type=ENCR_TYPE_OPEN):
-    """Runs the negative out-of-band data-path tests: mismatched information
+    def run_mismatched_oob_data_path_test(self,
+                                          init_mismatch_mac=False,
+                                          resp_mismatch_mac=False,
+                                          init_encr_type=ENCR_TYPE_OPEN,
+                                          resp_encr_type=ENCR_TYPE_OPEN):
+        """Runs the negative out-of-band data-path tests: mismatched information
     between Responder and Initiator.
 
     Args:
@@ -422,786 +458,794 @@
       init_encr_type: Encryption type of Initiator - ENCR_TYPE_*
       resp_encr_type: Encryption type of Responder - ENCR_TYPE_*
     """
-    init_dut = self.android_devices[0]
-    init_dut.pretty_name = "Initiator"
-    resp_dut = self.android_devices[1]
-    resp_dut.pretty_name = "Responder"
+        init_dut = self.android_devices[0]
+        init_dut.pretty_name = "Initiator"
+        resp_dut = self.android_devices[1]
+        resp_dut.pretty_name = "Responder"
 
-    # Initiator+Responder: attach and wait for confirmation & identity
-    init_id = init_dut.droid.wifiAwareAttach(True)
-    autils.wait_for_event(init_dut, aconsts.EVENT_CB_ON_ATTACHED)
-    init_ident_event = autils.wait_for_event(
-        init_dut, aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
-    init_mac = init_ident_event["data"]["mac"]
-    time.sleep(self.device_startup_offset)
-    resp_id = resp_dut.droid.wifiAwareAttach(True)
-    autils.wait_for_event(resp_dut, aconsts.EVENT_CB_ON_ATTACHED)
-    resp_ident_event = autils.wait_for_event(
-        resp_dut, aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
-    resp_mac = resp_ident_event["data"]["mac"]
+        # Initiator+Responder: attach and wait for confirmation & identity
+        init_id = init_dut.droid.wifiAwareAttach(True)
+        autils.wait_for_event(init_dut, aconsts.EVENT_CB_ON_ATTACHED)
+        init_ident_event = autils.wait_for_event(
+            init_dut, aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
+        init_mac = init_ident_event["data"]["mac"]
+        time.sleep(self.device_startup_offset)
+        resp_id = resp_dut.droid.wifiAwareAttach(True)
+        autils.wait_for_event(resp_dut, aconsts.EVENT_CB_ON_ATTACHED)
+        resp_ident_event = autils.wait_for_event(
+            resp_dut, aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
+        resp_mac = resp_ident_event["data"]["mac"]
 
-    if init_mismatch_mac: # assumes legit ones don't start with "00"
-      init_mac = "00" + init_mac[2:]
-    if resp_mismatch_mac:
-      resp_mac = "00" + resp_mac[2:]
+        if init_mismatch_mac:  # assumes legit ones don't start with "00"
+            init_mac = "00" + init_mac[2:]
+        if resp_mismatch_mac:
+            resp_mac = "00" + resp_mac[2:]
 
-    # wait for for devices to synchronize with each other - there are no other
-    # mechanisms to make sure this happens for OOB discovery (except retrying
-    # to execute the data-path request)
-    time.sleep(self.WAIT_FOR_CLUSTER)
+        # wait for devices to synchronize with each other - there are no other
+        # mechanisms to make sure this happens for OOB discovery (except retrying
+        # to execute the data-path request)
+        time.sleep(self.WAIT_FOR_CLUSTER)
 
-    # set up separate keys: even if types are the same we want a mismatch
-    init_passphrase = None
-    init_pmk = None
-    if init_encr_type == self.ENCR_TYPE_PASSPHRASE:
-      init_passphrase = self.PASSPHRASE
-    elif init_encr_type == self.ENCR_TYPE_PMK:
-      init_pmk = self.PMK
+        # set up separate keys: even if types are the same we want a mismatch
+        init_passphrase = None
+        init_pmk = None
+        if init_encr_type == self.ENCR_TYPE_PASSPHRASE:
+            init_passphrase = self.PASSPHRASE
+        elif init_encr_type == self.ENCR_TYPE_PMK:
+            init_pmk = self.PMK
 
-    resp_passphrase = None
-    resp_pmk = None
-    if resp_encr_type == self.ENCR_TYPE_PASSPHRASE:
-      resp_passphrase = self.PASSPHRASE2
-    elif resp_encr_type == self.ENCR_TYPE_PMK:
-      resp_pmk = self.PMK2
+        resp_passphrase = None
+        resp_pmk = None
+        if resp_encr_type == self.ENCR_TYPE_PASSPHRASE:
+            resp_passphrase = self.PASSPHRASE2
+        elif resp_encr_type == self.ENCR_TYPE_PMK:
+            resp_pmk = self.PMK2
 
-    # Responder: request network
-    resp_req_key = self.request_network(
-        resp_dut,
-        resp_dut.droid.wifiAwareCreateNetworkSpecifierOob(
-            resp_id, aconsts.DATA_PATH_RESPONDER, init_mac, resp_passphrase,
-            resp_pmk))
+        # Responder: request network
+        resp_req_key = self.request_network(
+            resp_dut,
+            resp_dut.droid.wifiAwareCreateNetworkSpecifierOob(
+                resp_id, aconsts.DATA_PATH_RESPONDER, init_mac,
+                resp_passphrase, resp_pmk))
 
-    # Initiator: request network
-    init_req_key = self.request_network(
-        init_dut,
-        init_dut.droid.wifiAwareCreateNetworkSpecifierOob(
-            init_id, aconsts.DATA_PATH_INITIATOR, resp_mac, init_passphrase,
-            init_pmk))
+        # Initiator: request network
+        init_req_key = self.request_network(
+            init_dut,
+            init_dut.droid.wifiAwareCreateNetworkSpecifierOob(
+                init_id, aconsts.DATA_PATH_INITIATOR, resp_mac,
+                init_passphrase, init_pmk))
 
-    # Initiator & Responder: fail on network formation
-    time.sleep(autils.EVENT_NDP_TIMEOUT)
-    autils.fail_on_event_with_keys(init_dut, cconsts.EVENT_NETWORK_CALLBACK, 0,
-                                   (cconsts.NETWORK_CB_KEY_ID, init_req_key))
-    autils.fail_on_event_with_keys(resp_dut, cconsts.EVENT_NETWORK_CALLBACK, 0,
-                                   (cconsts.NETWORK_CB_KEY_ID, resp_req_key))
+        # Initiator & Responder: fail on network formation
+        time.sleep(autils.EVENT_NDP_TIMEOUT)
+        autils.fail_on_event_with_keys(
+            init_dut, cconsts.EVENT_NETWORK_CALLBACK, 0,
+            (cconsts.NETWORK_CB_KEY_ID, init_req_key))
+        autils.fail_on_event_with_keys(
+            resp_dut, cconsts.EVENT_NETWORK_CALLBACK, 0,
+            (cconsts.NETWORK_CB_KEY_ID, resp_req_key))
 
-    # clean-up
-    resp_dut.droid.connectivityUnregisterNetworkCallback(resp_req_key)
-    init_dut.droid.connectivityUnregisterNetworkCallback(init_req_key)
+        # clean-up
+        resp_dut.droid.connectivityUnregisterNetworkCallback(resp_req_key)
+        init_dut.droid.connectivityUnregisterNetworkCallback(init_req_key)
 
+    #######################################
+    # Positive In-Band (IB) tests key:
+    #
+    # name format: test_ib_<pub_type>_<sub_type>_<encr_type>_<peer_spec>
+    # where:
+    #
+    # pub_type: Type of publish discovery session: unsolicited or solicited.
+    # sub_type: Type of subscribe discovery session: passive or active.
+    # encr_type: Encryption type: open, passphrase
+    # peer_spec: Peer specification method: any or specific
+    #
+    # Note: In-Band means using Wi-Fi Aware for discovery and referring to the
+    # peer using the Aware-provided peer handle (as opposed to a MAC address).
+    #######################################
 
-  #######################################
-  # Positive In-Band (IB) tests key:
-  #
-  # names is: test_ib_<pub_type>_<sub_type>_<encr_type>_<peer_spec>
-  # where:
-  #
-  # pub_type: Type of publish discovery session: unsolicited or solicited.
-  # sub_type: Type of subscribe discovery session: passive or active.
-  # encr_type: Encription type: open, passphrase
-  # peer_spec: Peer specification method: any or specific
-  #
-  # Note: In-Band means using Wi-Fi Aware for discovery and referring to the
-  # peer using the Aware-provided peer handle (as opposed to a MAC address).
-  #######################################
-
-  @test_tracker_info(uuid="fa30bedc-d1de-4440-bf25-ec00d10555af")
-  def test_ib_unsolicited_passive_open_specific(self):
-    """Data-path: in-band, unsolicited/passive, open encryption, specific peer
+    @test_tracker_info(uuid="fa30bedc-d1de-4440-bf25-ec00d10555af")
+    def test_ib_unsolicited_passive_open_specific(self):
+        """Data-path: in-band, unsolicited/passive, open encryption, specific peer
 
     Verifies end-to-end discovery + data-path creation.
     """
-    self.run_ib_data_path_test(
-        ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
-        stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
-        encr_type=self.ENCR_TYPE_OPEN,
-        use_peer_id=True)
+        self.run_ib_data_path_test(
+            ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
+            stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
+            encr_type=self.ENCR_TYPE_OPEN,
+            use_peer_id=True)
 
-  @test_tracker_info(uuid="57fc9d53-32ae-470f-a8b1-2fe37893687d")
-  def test_ib_unsolicited_passive_open_any(self):
-    """Data-path: in-band, unsolicited/passive, open encryption, any peer
+    @test_tracker_info(uuid="57fc9d53-32ae-470f-a8b1-2fe37893687d")
+    def test_ib_unsolicited_passive_open_any(self):
+        """Data-path: in-band, unsolicited/passive, open encryption, any peer
 
     Verifies end-to-end discovery + data-path creation.
     """
-    self.run_ib_data_path_test(
-        ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
-        stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
-        encr_type=self.ENCR_TYPE_OPEN,
-        use_peer_id=False)
+        self.run_ib_data_path_test(
+            ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
+            stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
+            encr_type=self.ENCR_TYPE_OPEN,
+            use_peer_id=False)
 
-  @test_tracker_info(uuid="93b2a23d-8579-448a-936c-7812929464cf")
-  def test_ib_unsolicited_passive_passphrase_specific(self):
-    """Data-path: in-band, unsolicited/passive, passphrase, specific peer
+    @test_tracker_info(uuid="93b2a23d-8579-448a-936c-7812929464cf")
+    def test_ib_unsolicited_passive_passphrase_specific(self):
+        """Data-path: in-band, unsolicited/passive, passphrase, specific peer
 
     Verifies end-to-end discovery + data-path creation.
     """
-    self.run_ib_data_path_test(
-        ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
-        stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
-        encr_type=self.ENCR_TYPE_PASSPHRASE,
-        use_peer_id=True)
+        self.run_ib_data_path_test(
+            ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
+            stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
+            encr_type=self.ENCR_TYPE_PASSPHRASE,
+            use_peer_id=True)
 
-  @test_tracker_info(uuid="1736126f-a0ff-4712-acc4-f89b4eef5716")
-  def test_ib_unsolicited_passive_passphrase_any(self):
-    """Data-path: in-band, unsolicited/passive, passphrase, any peer
+    @test_tracker_info(uuid="1736126f-a0ff-4712-acc4-f89b4eef5716")
+    def test_ib_unsolicited_passive_passphrase_any(self):
+        """Data-path: in-band, unsolicited/passive, passphrase, any peer
 
     Verifies end-to-end discovery + data-path creation.
     """
-    self.run_ib_data_path_test(
-        ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
-        stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
-        encr_type=self.ENCR_TYPE_PASSPHRASE,
-        use_peer_id=False)
+        self.run_ib_data_path_test(
+            ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
+            stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
+            encr_type=self.ENCR_TYPE_PASSPHRASE,
+            use_peer_id=False)
 
-  @test_tracker_info(uuid="b9353d5b-3f77-46bf-bfd9-65d56a7c939a")
-  def test_ib_unsolicited_passive_pmk_specific(self):
-    """Data-path: in-band, unsolicited/passive, PMK, specific peer
+    @test_tracker_info(uuid="b9353d5b-3f77-46bf-bfd9-65d56a7c939a")
+    def test_ib_unsolicited_passive_pmk_specific(self):
+        """Data-path: in-band, unsolicited/passive, PMK, specific peer
 
     Verifies end-to-end discovery + data-path creation.
     """
-    self.run_ib_data_path_test(
-        ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
-        stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
-        encr_type=self.ENCR_TYPE_PMK,
-        use_peer_id=True)
+        self.run_ib_data_path_test(
+            ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
+            stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
+            encr_type=self.ENCR_TYPE_PMK,
+            use_peer_id=True)
 
-  @test_tracker_info(uuid="06f3b2ab-4a10-4398-83a4-6a23851b1662")
-  def test_ib_unsolicited_passive_pmk_any(self):
-    """Data-path: in-band, unsolicited/passive, PMK, any peer
+    @test_tracker_info(uuid="06f3b2ab-4a10-4398-83a4-6a23851b1662")
+    def test_ib_unsolicited_passive_pmk_any(self):
+        """Data-path: in-band, unsolicited/passive, PMK, any peer
 
     Verifies end-to-end discovery + data-path creation.
     """
-    self.run_ib_data_path_test(
-        ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
-        stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
-        encr_type=self.ENCR_TYPE_PMK,
-        use_peer_id=False)
+        self.run_ib_data_path_test(
+            ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
+            stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
+            encr_type=self.ENCR_TYPE_PMK,
+            use_peer_id=False)
 
-  @test_tracker_info(uuid="0ed7d8b3-a69e-46ba-aeb7-13e507ecf290")
-  def test_ib_solicited_active_open_specific(self):
-    """Data-path: in-band, solicited/active, open encryption, specific peer
+    @test_tracker_info(uuid="0ed7d8b3-a69e-46ba-aeb7-13e507ecf290")
+    def test_ib_solicited_active_open_specific(self):
+        """Data-path: in-band, solicited/active, open encryption, specific peer
 
     Verifies end-to-end discovery + data-path creation.
     """
-    self.run_ib_data_path_test(
-        ptype=aconsts.PUBLISH_TYPE_SOLICITED,
-        stype=aconsts.SUBSCRIBE_TYPE_ACTIVE,
-        encr_type=self.ENCR_TYPE_OPEN,
-        use_peer_id=True)
+        self.run_ib_data_path_test(
+            ptype=aconsts.PUBLISH_TYPE_SOLICITED,
+            stype=aconsts.SUBSCRIBE_TYPE_ACTIVE,
+            encr_type=self.ENCR_TYPE_OPEN,
+            use_peer_id=True)
 
-  @test_tracker_info(uuid="c7ba6d28-5ef6-45d9-95d5-583ad6d981f3")
-  def test_ib_solicited_active_open_any(self):
-    """Data-path: in-band, solicited/active, open encryption, any peer
+    @test_tracker_info(uuid="c7ba6d28-5ef6-45d9-95d5-583ad6d981f3")
+    def test_ib_solicited_active_open_any(self):
+        """Data-path: in-band, solicited/active, open encryption, any peer
 
     Verifies end-to-end discovery + data-path creation.
     """
-    self.run_ib_data_path_test(
-        ptype=aconsts.PUBLISH_TYPE_SOLICITED,
-        stype=aconsts.SUBSCRIBE_TYPE_ACTIVE,
-        encr_type=self.ENCR_TYPE_OPEN,
-        use_peer_id=False)
+        self.run_ib_data_path_test(
+            ptype=aconsts.PUBLISH_TYPE_SOLICITED,
+            stype=aconsts.SUBSCRIBE_TYPE_ACTIVE,
+            encr_type=self.ENCR_TYPE_OPEN,
+            use_peer_id=False)
 
-  @test_tracker_info(uuid="388cea99-0e2e-49ea-b00e-f3e56b6236e5")
-  def test_ib_solicited_active_passphrase_specific(self):
-    """Data-path: in-band, solicited/active, passphrase, specific peer
+    @test_tracker_info(uuid="388cea99-0e2e-49ea-b00e-f3e56b6236e5")
+    def test_ib_solicited_active_passphrase_specific(self):
+        """Data-path: in-band, solicited/active, passphrase, specific peer
 
     Verifies end-to-end discovery + data-path creation.
     """
-    self.run_ib_data_path_test(
-        ptype=aconsts.PUBLISH_TYPE_SOLICITED,
-        stype=aconsts.SUBSCRIBE_TYPE_ACTIVE,
-        encr_type=self.ENCR_TYPE_PASSPHRASE,
-        use_peer_id=True)
+        self.run_ib_data_path_test(
+            ptype=aconsts.PUBLISH_TYPE_SOLICITED,
+            stype=aconsts.SUBSCRIBE_TYPE_ACTIVE,
+            encr_type=self.ENCR_TYPE_PASSPHRASE,
+            use_peer_id=True)
 
-  @test_tracker_info(uuid="fcd3e28a-5eab-4169-8a0c-dc7204dcdc13")
-  def test_ib_solicited_active_passphrase_any(self):
-    """Data-path: in-band, solicited/active, passphrase, any peer
+    @test_tracker_info(uuid="fcd3e28a-5eab-4169-8a0c-dc7204dcdc13")
+    def test_ib_solicited_active_passphrase_any(self):
+        """Data-path: in-band, solicited/active, passphrase, any peer
 
     Verifies end-to-end discovery + data-path creation.
     """
-    self.run_ib_data_path_test(
-        ptype=aconsts.PUBLISH_TYPE_SOLICITED,
-        stype=aconsts.SUBSCRIBE_TYPE_ACTIVE,
-        encr_type=self.ENCR_TYPE_PASSPHRASE,
-        use_peer_id=False)
+        self.run_ib_data_path_test(
+            ptype=aconsts.PUBLISH_TYPE_SOLICITED,
+            stype=aconsts.SUBSCRIBE_TYPE_ACTIVE,
+            encr_type=self.ENCR_TYPE_PASSPHRASE,
+            use_peer_id=False)
 
-  @test_tracker_info(uuid="9d4eaad7-ba53-4a06-8ce0-e308daea3309")
-  def test_ib_solicited_active_pmk_specific(self):
-    """Data-path: in-band, solicited/active, PMK, specific peer
+    @test_tracker_info(uuid="9d4eaad7-ba53-4a06-8ce0-e308daea3309")
+    def test_ib_solicited_active_pmk_specific(self):
+        """Data-path: in-band, solicited/active, PMK, specific peer
 
     Verifies end-to-end discovery + data-path creation.
     """
-    self.run_ib_data_path_test(
-        ptype=aconsts.PUBLISH_TYPE_SOLICITED,
-        stype=aconsts.SUBSCRIBE_TYPE_ACTIVE,
-        encr_type=self.ENCR_TYPE_PMK,
-        use_peer_id=True)
+        self.run_ib_data_path_test(
+            ptype=aconsts.PUBLISH_TYPE_SOLICITED,
+            stype=aconsts.SUBSCRIBE_TYPE_ACTIVE,
+            encr_type=self.ENCR_TYPE_PMK,
+            use_peer_id=True)
 
-  @test_tracker_info(uuid="129d850e-c312-4137-a67b-05ae95fe66cc")
-  def test_ib_solicited_active_pmk_any(self):
-    """Data-path: in-band, solicited/active, PMK, any peer
+    @test_tracker_info(uuid="129d850e-c312-4137-a67b-05ae95fe66cc")
+    def test_ib_solicited_active_pmk_any(self):
+        """Data-path: in-band, solicited/active, PMK, any peer
 
     Verifies end-to-end discovery + data-path creation.
     """
-    self.run_ib_data_path_test(
-        ptype=aconsts.PUBLISH_TYPE_SOLICITED,
-        stype=aconsts.SUBSCRIBE_TYPE_ACTIVE,
-        encr_type=self.ENCR_TYPE_PMK,
-        use_peer_id=False)
+        self.run_ib_data_path_test(
+            ptype=aconsts.PUBLISH_TYPE_SOLICITED,
+            stype=aconsts.SUBSCRIBE_TYPE_ACTIVE,
+            encr_type=self.ENCR_TYPE_PMK,
+            use_peer_id=False)
 
-  #######################################
-  # Positive In-Band (IB) with a publish session running on the subscriber
-  # tests key:
-  #
-  # names is: test_ib_extra_pub_<same|diff>_<pub_type>_<sub_type>
-  #                                          _<encr_type>_<peer_spec>
-  # where:
-  #
-  # same|diff: Whether the extra publish session (on the subscriber) is the same
-  #            or different from the primary session.
-  # pub_type: Type of publish discovery session: unsolicited or solicited.
-  # sub_type: Type of subscribe discovery session: passive or active.
-  # encr_type: Encryption type: open, passphrase
-  # peer_spec: Peer specification method: any or specific
-  #
-  # Note: In-Band means using Wi-Fi Aware for discovery and referring to the
-  # peer using the Aware-provided peer handle (as opposed to a MAC address).
-  #######################################
+    #######################################
+    # Positive In-Band (IB) with a publish session running on the subscriber
+    # tests key:
+    #
+    # name is: test_ib_extra_pub_<same|diff>_<pub_type>_<sub_type>
+    #                                          _<encr_type>_<peer_spec>
+    # where:
+    #
+    # same|diff: Whether the extra publish session (on the subscriber) is the same
+    #            or different from the primary session.
+    # pub_type: Type of publish discovery session: unsolicited or solicited.
+    # sub_type: Type of subscribe discovery session: passive or active.
+    # encr_type: Encryption type: open, passphrase
+    # peer_spec: Peer specification method: any or specific
+    #
+    # Note: In-Band means using Wi-Fi Aware for discovery and referring to the
+    # peer using the Aware-provided peer handle (as opposed to a MAC address).
+    #######################################
 
-  @test_tracker_info(uuid="e855dd81-45c8-4bb2-a204-7687c48ff843")
-  def test_ib_extra_pub_same_unsolicited_passive_open_specific(self):
-    """Data-path: in-band, unsolicited/passive, open encryption, specific peer.
+    @test_tracker_info(uuid="e855dd81-45c8-4bb2-a204-7687c48ff843")
+    def test_ib_extra_pub_same_unsolicited_passive_open_specific(self):
+        """Data-path: in-band, unsolicited/passive, open encryption, specific peer.
 
     Configuration contains a publisher (for the same service) running on *both*
     devices.
 
     Verifies end-to-end discovery + data-path creation.
     """
-    self.run_ib_data_path_test(
-        ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
-        stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
-        encr_type=self.ENCR_TYPE_OPEN,
-        use_peer_id=True,
-        pub_on_both=True,
-        pub_on_both_same=True)
+        self.run_ib_data_path_test(
+            ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
+            stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
+            encr_type=self.ENCR_TYPE_OPEN,
+            use_peer_id=True,
+            pub_on_both=True,
+            pub_on_both_same=True)
 
-  @test_tracker_info(uuid="57fc9d53-32ae-470f-a8b1-2fe37893687d")
-  def test_ib_extra_pub_same_unsolicited_passive_open_any(self):
-    """Data-path: in-band, unsolicited/passive, open encryption, any peer.
+    @test_tracker_info(uuid="57fc9d53-32ae-470f-a8b1-2fe37893687d")
+    def test_ib_extra_pub_same_unsolicited_passive_open_any(self):
+        """Data-path: in-band, unsolicited/passive, open encryption, any peer.
 
     Configuration contains a publisher (for the same service) running on *both*
     devices.
 
     Verifies end-to-end discovery + data-path creation.
     """
-    self.run_ib_data_path_test(
-        ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
-        stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
-        encr_type=self.ENCR_TYPE_OPEN,
-        use_peer_id=False,
-        pub_on_both=True,
-        pub_on_both_same=True)
+        self.run_ib_data_path_test(
+            ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
+            stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
+            encr_type=self.ENCR_TYPE_OPEN,
+            use_peer_id=False,
+            pub_on_both=True,
+            pub_on_both_same=True)
 
-  @test_tracker_info(uuid="7a32f439-d745-4716-a75e-b54109aaaf82")
-  def test_ib_extra_pub_diff_unsolicited_passive_open_specific(self):
-    """Data-path: in-band, unsolicited/passive, open encryption, specific peer.
+    @test_tracker_info(uuid="7a32f439-d745-4716-a75e-b54109aaaf82")
+    def test_ib_extra_pub_diff_unsolicited_passive_open_specific(self):
+        """Data-path: in-band, unsolicited/passive, open encryption, specific peer.
 
     Configuration contains a publisher (for a different service) running on
     *both* devices.
 
     Verifies end-to-end discovery + data-path creation.
     """
-    self.run_ib_data_path_test(
-        ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
-        stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
-        encr_type=self.ENCR_TYPE_OPEN,
-        use_peer_id=True,
-        pub_on_both=True,
-        pub_on_both_same=False)
+        self.run_ib_data_path_test(
+            ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
+            stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
+            encr_type=self.ENCR_TYPE_OPEN,
+            use_peer_id=True,
+            pub_on_both=True,
+            pub_on_both_same=False)
 
-  @test_tracker_info(uuid="a14ddc66-88fd-4b49-ab37-225533867c63")
-  def test_ib_extra_pub_diff_unsolicited_passive_open_any(self):
-    """Data-path: in-band, unsolicited/passive, open encryption, any peer.
+    @test_tracker_info(uuid="a14ddc66-88fd-4b49-ab37-225533867c63")
+    def test_ib_extra_pub_diff_unsolicited_passive_open_any(self):
+        """Data-path: in-band, unsolicited/passive, open encryption, any peer.
 
     Configuration contains a publisher (for a different service) running on
     *both* devices.
 
     Verifies end-to-end discovery + data-path creation.
     """
-    self.run_ib_data_path_test(
-        ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
-        stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
-        encr_type=self.ENCR_TYPE_OPEN,
-        use_peer_id=False,
-        pub_on_both=True,
-        pub_on_both_same=False)
+        self.run_ib_data_path_test(
+            ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
+            stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
+            encr_type=self.ENCR_TYPE_OPEN,
+            use_peer_id=False,
+            pub_on_both=True,
+            pub_on_both_same=False)
 
-  #######################################
-  # Positive Out-of-Band (OOB) tests key:
-  #
-  # names is: test_oob_<encr_type>_<peer_spec>
-  # where:
-  #
-  # encr_type: Encryption type: open, passphrase
-  # peer_spec: Peer specification method: any or specific
-  #
-  # Optionally set up an extra discovery session to test coexistence. If so
-  # add "ib_coex" to test name.
-  #
-  # Note: Out-of-Band means using a non-Wi-Fi Aware mechanism for discovery and
-  # exchange of MAC addresses and then Wi-Fi Aware for data-path.
-  #######################################
+    #######################################
+    # Positive Out-of-Band (OOB) tests key:
+    #
+    # name is: test_oob_<encr_type>_<peer_spec>
+    # where:
+    #
+    # encr_type: Encryption type: open, passphrase, pmk
+    # peer_spec: Peer specification method: any or specific
+    #
+    # Optionally set up an extra discovery session to test coexistence. If so
+    # add "ib_coex" to test name.
+    #
+    # Note: Out-of-Band means using a non-Wi-Fi Aware mechanism for discovery and
+    # exchange of MAC addresses and then Wi-Fi Aware for data-path.
+    #######################################
 
-  @test_tracker_info(uuid="7db17d8c-1dce-4084-b695-215bbcfe7d41")
-  def test_oob_open_specific(self):
-    """Data-path: out-of-band, open encryption, specific peer
+    @test_tracker_info(uuid="7db17d8c-1dce-4084-b695-215bbcfe7d41")
+    def test_oob_open_specific(self):
+        """Data-path: out-of-band, open encryption, specific peer
 
     Verifies end-to-end discovery + data-path creation.
     """
-    self.run_oob_data_path_test(
-        encr_type=self.ENCR_TYPE_OPEN,
-        use_peer_id=True)
+        self.run_oob_data_path_test(
+            encr_type=self.ENCR_TYPE_OPEN, use_peer_id=True)
 
-  @test_tracker_info(uuid="ad416d89-cb95-4a07-8d29-ee213117450b")
-  def test_oob_open_any(self):
-    """Data-path: out-of-band, open encryption, any peer
+    @test_tracker_info(uuid="ad416d89-cb95-4a07-8d29-ee213117450b")
+    def test_oob_open_any(self):
+        """Data-path: out-of-band, open encryption, any peer
 
     Verifies end-to-end discovery + data-path creation.
     """
-    self.run_oob_data_path_test(
-        encr_type=self.ENCR_TYPE_OPEN,
-        use_peer_id=False)
+        self.run_oob_data_path_test(
+            encr_type=self.ENCR_TYPE_OPEN, use_peer_id=False)
 
-  @test_tracker_info(uuid="74937a3a-d524-43e2-8979-4449271cab52")
-  def test_oob_passphrase_specific(self):
-    """Data-path: out-of-band, passphrase, specific peer
+    @test_tracker_info(uuid="74937a3a-d524-43e2-8979-4449271cab52")
+    def test_oob_passphrase_specific(self):
+        """Data-path: out-of-band, passphrase, specific peer
 
     Verifies end-to-end discovery + data-path creation.
     """
-    self.run_oob_data_path_test(
-        encr_type=self.ENCR_TYPE_PASSPHRASE,
-        use_peer_id=True)
+        self.run_oob_data_path_test(
+            encr_type=self.ENCR_TYPE_PASSPHRASE, use_peer_id=True)
 
-  @test_tracker_info(uuid="afcbdc7e-d3a9-465b-b1da-ce2e42e3941e")
-  def test_oob_passphrase_any(self):
-    """Data-path: out-of-band, passphrase, any peer
+    @test_tracker_info(uuid="afcbdc7e-d3a9-465b-b1da-ce2e42e3941e")
+    def test_oob_passphrase_any(self):
+        """Data-path: out-of-band, passphrase, any peer
 
     Verifies end-to-end discovery + data-path creation.
     """
-    self.run_oob_data_path_test(
-        encr_type=self.ENCR_TYPE_PASSPHRASE,
-        use_peer_id=False)
+        self.run_oob_data_path_test(
+            encr_type=self.ENCR_TYPE_PASSPHRASE, use_peer_id=False)
 
-  @test_tracker_info(uuid="0d095031-160a-4537-aab5-41b6ad5d55f8")
-  def test_oob_pmk_specific(self):
-    """Data-path: out-of-band, PMK, specific peer
+    @test_tracker_info(uuid="0d095031-160a-4537-aab5-41b6ad5d55f8")
+    def test_oob_pmk_specific(self):
+        """Data-path: out-of-band, PMK, specific peer
 
     Verifies end-to-end discovery + data-path creation.
     """
-    self.run_oob_data_path_test(
-        encr_type=self.ENCR_TYPE_PMK,
-        use_peer_id=True)
+        self.run_oob_data_path_test(
+            encr_type=self.ENCR_TYPE_PMK, use_peer_id=True)
 
-  @test_tracker_info(uuid="e45477bd-66cc-4eb7-88dd-4518c8aa2a74")
-  def test_oob_pmk_any(self):
-    """Data-path: out-of-band, PMK, any peer
+    @test_tracker_info(uuid="e45477bd-66cc-4eb7-88dd-4518c8aa2a74")
+    def test_oob_pmk_any(self):
+        """Data-path: out-of-band, PMK, any peer
 
     Verifies end-to-end discovery + data-path creation.
     """
-    self.run_oob_data_path_test(
-        encr_type=self.ENCR_TYPE_PMK,
-        use_peer_id=False)
+        self.run_oob_data_path_test(
+            encr_type=self.ENCR_TYPE_PMK, use_peer_id=False)
 
-  @test_tracker_info(uuid="dd464f24-b404-4eea-955c-d10c9e8adefc")
-  def test_oob_ib_coex_open_specific(self):
-    """Data-path: out-of-band, open encryption, specific peer - in-band coex:
+    @test_tracker_info(uuid="dd464f24-b404-4eea-955c-d10c9e8adefc")
+    def test_oob_ib_coex_open_specific(self):
+        """Data-path: out-of-band, open encryption, specific peer - in-band coex:
     set up a concurrent discovery session to verify no impact. The session
     consists of Publisher on both ends, and a Subscriber on the Responder.
 
     Verifies end-to-end discovery + data-path creation.
     """
-    self.run_oob_data_path_test(
-        encr_type=self.ENCR_TYPE_OPEN,
-        use_peer_id=True,
-        setup_discovery_sessions=True)
+        self.run_oob_data_path_test(
+            encr_type=self.ENCR_TYPE_OPEN,
+            use_peer_id=True,
+            setup_discovery_sessions=True)
 
-  @test_tracker_info(uuid="088fcd3a-b015-4179-a9a5-91f782b03e3b")
-  def test_oob_ib_coex_open_any(self):
-    """Data-path: out-of-band, open encryption, any peer - in-band coex:
+    @test_tracker_info(uuid="088fcd3a-b015-4179-a9a5-91f782b03e3b")
+    def test_oob_ib_coex_open_any(self):
+        """Data-path: out-of-band, open encryption, any peer - in-band coex:
     set up a concurrent discovery session to verify no impact. The session
     consists of Publisher on both ends, and a Subscriber on the Responder.
 
     Verifies end-to-end discovery + data-path creation.
     """
-    self.run_oob_data_path_test(
-        encr_type=self.ENCR_TYPE_OPEN,
-        use_peer_id=False,
-        setup_discovery_sessions=True)
+        self.run_oob_data_path_test(
+            encr_type=self.ENCR_TYPE_OPEN,
+            use_peer_id=False,
+            setup_discovery_sessions=True)
 
-  ##############################################################
+    ##############################################################
 
-  @test_tracker_info(uuid="1c2c9805-dc1e-43b5-a1b8-315e8c9a4337")
-  def test_passphrase_min(self):
-    """Data-path: minimum passphrase length
+    @test_tracker_info(uuid="1c2c9805-dc1e-43b5-a1b8-315e8c9a4337")
+    def test_passphrase_min(self):
+        """Data-path: minimum passphrase length
 
     Use in-band, unsolicited/passive, any peer combination
     """
-    self.run_ib_data_path_test(ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
-                               stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
-                               encr_type=self.ENCR_TYPE_PASSPHRASE,
-                               use_peer_id=False,
-                               passphrase_to_use=self.PASSPHRASE_MIN)
+        self.run_ib_data_path_test(
+            ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
+            stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
+            encr_type=self.ENCR_TYPE_PASSPHRASE,
+            use_peer_id=False,
+            passphrase_to_use=self.PASSPHRASE_MIN)
 
-  @test_tracker_info(uuid="e696e2b9-87a9-4521-b337-61b9efaa2057")
-  def test_passphrase_max(self):
-    """Data-path: maximum passphrase length
+    @test_tracker_info(uuid="e696e2b9-87a9-4521-b337-61b9efaa2057")
+    def test_passphrase_max(self):
+        """Data-path: maximum passphrase length
 
     Use in-band, unsolicited/passive, any peer combination
     """
-    self.run_ib_data_path_test(ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
-                               stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
-                               encr_type=self.ENCR_TYPE_PASSPHRASE,
-                               use_peer_id=False,
-                               passphrase_to_use=self.PASSPHRASE_MAX)
+        self.run_ib_data_path_test(
+            ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
+            stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
+            encr_type=self.ENCR_TYPE_PASSPHRASE,
+            use_peer_id=False,
+            passphrase_to_use=self.PASSPHRASE_MAX)
 
-  @test_tracker_info(uuid="533cd44c-ff30-4283-ac28-f71fd7b4f02d")
-  def test_negative_mismatch_publisher_peer_id(self):
-    """Data-path: failure when publisher peer ID is mismatched"""
-    self.run_mismatched_ib_data_path_test(pub_mismatch=True, sub_mismatch=False)
+    @test_tracker_info(uuid="533cd44c-ff30-4283-ac28-f71fd7b4f02d")
+    def test_negative_mismatch_publisher_peer_id(self):
+        """Data-path: failure when publisher peer ID is mismatched"""
+        self.run_mismatched_ib_data_path_test(
+            pub_mismatch=True, sub_mismatch=False)
 
-  @test_tracker_info(uuid="682f275e-722a-4f8b-85e7-0dcea9d25532")
-  def test_negative_mismatch_subscriber_peer_id(self):
-    """Data-path: failure when subscriber peer ID is mismatched"""
-    self.run_mismatched_ib_data_path_test(pub_mismatch=False, sub_mismatch=True)
+    @test_tracker_info(uuid="682f275e-722a-4f8b-85e7-0dcea9d25532")
+    def test_negative_mismatch_subscriber_peer_id(self):
+        """Data-path: failure when subscriber peer ID is mismatched"""
+        self.run_mismatched_ib_data_path_test(
+            pub_mismatch=False, sub_mismatch=True)
 
-  @test_tracker_info(uuid="7fa82796-7fc9-4d9e-bbbb-84b751788943")
-  def test_negative_mismatch_init_mac(self):
-    """Data-path: failure when Initiator MAC address mismatch"""
-    self.run_mismatched_oob_data_path_test(
-        init_mismatch_mac=True,
-        resp_mismatch_mac=False)
+    @test_tracker_info(uuid="7fa82796-7fc9-4d9e-bbbb-84b751788943")
+    def test_negative_mismatch_init_mac(self):
+        """Data-path: failure when Initiator MAC address mismatch"""
+        self.run_mismatched_oob_data_path_test(
+            init_mismatch_mac=True, resp_mismatch_mac=False)
 
-  @test_tracker_info(uuid="edeae959-4644-44f9-8d41-bdeb5216954e")
-  def test_negative_mismatch_resp_mac(self):
-    """Data-path: failure when Responder MAC address mismatch"""
-    self.run_mismatched_oob_data_path_test(
-        init_mismatch_mac=False,
-        resp_mismatch_mac=True)
+    @test_tracker_info(uuid="edeae959-4644-44f9-8d41-bdeb5216954e")
+    def test_negative_mismatch_resp_mac(self):
+        """Data-path: failure when Responder MAC address mismatch"""
+        self.run_mismatched_oob_data_path_test(
+            init_mismatch_mac=False, resp_mismatch_mac=True)
 
-  @test_tracker_info(uuid="91f46949-c47f-49f9-a90f-6fae699613a7")
-  def test_negative_mismatch_passphrase(self):
-    """Data-path: failure when passphrases mismatch"""
-    self.run_mismatched_oob_data_path_test(
-        init_encr_type=self.ENCR_TYPE_PASSPHRASE,
-        resp_encr_type=self.ENCR_TYPE_PASSPHRASE)
+    @test_tracker_info(uuid="91f46949-c47f-49f9-a90f-6fae699613a7")
+    def test_negative_mismatch_passphrase(self):
+        """Data-path: failure when passphrases mismatch"""
+        self.run_mismatched_oob_data_path_test(
+            init_encr_type=self.ENCR_TYPE_PASSPHRASE,
+            resp_encr_type=self.ENCR_TYPE_PASSPHRASE)
 
-  @test_tracker_info(uuid="01c49c2e-dc92-4a27-bb47-c4fc67617c23")
-  def test_negative_mismatch_pmk(self):
-    """Data-path: failure when PMK mismatch"""
-    self.run_mismatched_oob_data_path_test(
-        init_encr_type=self.ENCR_TYPE_PMK,
-        resp_encr_type=self.ENCR_TYPE_PMK)
+    @test_tracker_info(uuid="01c49c2e-dc92-4a27-bb47-c4fc67617c23")
+    def test_negative_mismatch_pmk(self):
+        """Data-path: failure when PMK mismatch"""
+        self.run_mismatched_oob_data_path_test(
+            init_encr_type=self.ENCR_TYPE_PMK,
+            resp_encr_type=self.ENCR_TYPE_PMK)
 
-  @test_tracker_info(uuid="4d651797-5fbb-408e-a4b6-a6e1944136da")
-  def test_negative_mismatch_open_passphrase(self):
-    """Data-path: failure when initiator is open, and responder passphrase"""
-    self.run_mismatched_oob_data_path_test(
-        init_encr_type=self.ENCR_TYPE_OPEN,
-        resp_encr_type=self.ENCR_TYPE_PASSPHRASE)
+    @test_tracker_info(uuid="4d651797-5fbb-408e-a4b6-a6e1944136da")
+    def test_negative_mismatch_open_passphrase(self):
+        """Data-path: failure when initiator is open, and responder passphrase"""
+        self.run_mismatched_oob_data_path_test(
+            init_encr_type=self.ENCR_TYPE_OPEN,
+            resp_encr_type=self.ENCR_TYPE_PASSPHRASE)
 
-  @test_tracker_info(uuid="1ae697f4-5987-4187-aeef-1e22d07d4a7c")
-  def test_negative_mismatch_open_pmk(self):
-    """Data-path: failure when initiator is open, and responder PMK"""
-    self.run_mismatched_oob_data_path_test(
-        init_encr_type=self.ENCR_TYPE_OPEN,
-        resp_encr_type=self.ENCR_TYPE_PMK)
+    @test_tracker_info(uuid="1ae697f4-5987-4187-aeef-1e22d07d4a7c")
+    def test_negative_mismatch_open_pmk(self):
+        """Data-path: failure when initiator is open, and responder PMK"""
+        self.run_mismatched_oob_data_path_test(
+            init_encr_type=self.ENCR_TYPE_OPEN,
+            resp_encr_type=self.ENCR_TYPE_PMK)
 
-  @test_tracker_info(uuid="f027b1cc-0e7a-4075-b880-5e64b288afbd")
-  def test_negative_mismatch_pmk_passphrase(self):
-    """Data-path: failure when initiator is pmk, and responder passphrase"""
-    self.run_mismatched_oob_data_path_test(
-        init_encr_type=self.ENCR_TYPE_PMK,
-        resp_encr_type=self.ENCR_TYPE_PASSPHRASE)
+    @test_tracker_info(uuid="f027b1cc-0e7a-4075-b880-5e64b288afbd")
+    def test_negative_mismatch_pmk_passphrase(self):
+        """Data-path: failure when initiator is pmk, and responder passphrase"""
+        self.run_mismatched_oob_data_path_test(
+            init_encr_type=self.ENCR_TYPE_PMK,
+            resp_encr_type=self.ENCR_TYPE_PASSPHRASE)
 
-  @test_tracker_info(uuid="0819bbd4-72ae-49c4-bd46-5448db2b0a06")
-  def test_negative_mismatch_passphrase_open(self):
-    """Data-path: failure when initiator is passphrase, and responder open"""
-    self.run_mismatched_oob_data_path_test(
-        init_encr_type=self.ENCR_TYPE_PASSPHRASE,
-        resp_encr_type=self.ENCR_TYPE_OPEN)
+    @test_tracker_info(uuid="0819bbd4-72ae-49c4-bd46-5448db2b0a06")
+    def test_negative_mismatch_passphrase_open(self):
+        """Data-path: failure when initiator is passphrase, and responder open"""
+        self.run_mismatched_oob_data_path_test(
+            init_encr_type=self.ENCR_TYPE_PASSPHRASE,
+            resp_encr_type=self.ENCR_TYPE_OPEN)
 
-  @test_tracker_info(uuid="7ef24f62-8e6b-4732-88a3-80a43584dda4")
-  def test_negative_mismatch_pmk_open(self):
-    """Data-path: failure when initiator is PMK, and responder open"""
-    self.run_mismatched_oob_data_path_test(
-        init_encr_type=self.ENCR_TYPE_PMK,
-        resp_encr_type=self.ENCR_TYPE_OPEN)
+    @test_tracker_info(uuid="7ef24f62-8e6b-4732-88a3-80a43584dda4")
+    def test_negative_mismatch_pmk_open(self):
+        """Data-path: failure when initiator is PMK, and responder open"""
+        self.run_mismatched_oob_data_path_test(
+            init_encr_type=self.ENCR_TYPE_PMK,
+            resp_encr_type=self.ENCR_TYPE_OPEN)
 
-  @test_tracker_info(uuid="7b9c9efc-1c06-465e-8a5e-d6a22ac1da97")
-  def test_negative_mismatch_passphrase_pmk(self):
-    """Data-path: failure when initiator is passphrase, and responder pmk"""
-    self.run_mismatched_oob_data_path_test(
-        init_encr_type=self.ENCR_TYPE_PASSPHRASE,
-        resp_encr_type=self.ENCR_TYPE_OPEN)
+    @test_tracker_info(uuid="7b9c9efc-1c06-465e-8a5e-d6a22ac1da97")
+    def test_negative_mismatch_passphrase_pmk(self):
+        """Data-path: failure when initiator is passphrase, and responder pmk"""
+        self.run_mismatched_oob_data_path_test(
+            init_encr_type=self.ENCR_TYPE_PASSPHRASE,
+            resp_encr_type=self.ENCR_TYPE_PMK)
 
+    ##########################################################################
 
-  ##########################################################################
-
-  def wait_for_request_responses(self, dut, req_keys, aware_ifs):
-    """Wait for network request confirmation for all request keys.
+    def wait_for_request_responses(self, dut, req_keys, aware_ifs):
+        """Wait for network request confirmation for all request keys.
 
     Args:
       dut: Device under test
       req_keys: (in) A list of the network requests
       aware_ifs: (out) A list into which to append the network interface
     """
-    num_events = 0
-    while num_events != len(req_keys):
-      event = autils.wait_for_event(dut, cconsts.EVENT_NETWORK_CALLBACK,
-                                    timeout=autils.EVENT_NDP_TIMEOUT)
-      if (event["data"][cconsts.NETWORK_CB_KEY_EVENT] ==
-          cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED):
-        if event["data"][cconsts.NETWORK_CB_KEY_ID] in req_keys:
-          num_events = num_events + 1
-          aware_ifs.append(event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME])
-        else:
-          self.log.info("Received an unexpected connectivity, the revoked "
+        num_events = 0
+        while num_events != len(req_keys):
+            event = autils.wait_for_event(
+                dut,
+                cconsts.EVENT_NETWORK_CALLBACK,
+                timeout=autils.EVENT_NDP_TIMEOUT)
+            if (event["data"][cconsts.NETWORK_CB_KEY_EVENT] ==
+                    cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED):
+                if event["data"][cconsts.NETWORK_CB_KEY_ID] in req_keys:
+                    num_events = num_events + 1
+                    aware_ifs.append(
+                        event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME])
+                else:
+                    self.log.info(
+                        "Received an unexpected connectivity, the revoked "
                         "network request probably went through -- %s", event)
 
-  @test_tracker_info(uuid="2e325e2b-d552-4890-b470-20b40284395d")
-  def test_multiple_identical_networks(self):
-    """Validate that creating multiple networks between 2 devices, each network
+    @test_tracker_info(uuid="2e325e2b-d552-4890-b470-20b40284395d")
+    def test_multiple_identical_networks(self):
+        """Validate that creating multiple networks between 2 devices, each network
     with identical configuration is supported over a single NDP.
 
     Verify that the interface and IPv6 address is the same for all networks.
     """
-    init_dut = self.android_devices[0]
-    init_dut.pretty_name = "Initiator"
-    resp_dut = self.android_devices[1]
-    resp_dut.pretty_name = "Responder"
+        init_dut = self.android_devices[0]
+        init_dut.pretty_name = "Initiator"
+        resp_dut = self.android_devices[1]
+        resp_dut.pretty_name = "Responder"
 
-    N = 2 # first iteration (must be 2 to give us a chance to cancel the first)
-    M = 5 # second iteration
+        N = 2  # first iteration (must be 2 to give us a chance to cancel the first)
+        M = 5  # second iteration
 
-    init_ids = []
-    resp_ids = []
+        init_ids = []
+        resp_ids = []
 
-    # Initiator+Responder: attach and wait for confirmation & identity
-    # create N+M sessions to be used in the different (but identical) NDPs
-    for i in range(N + M):
-      id, init_mac = autils.attach_with_identity(init_dut)
-      init_ids.append(id)
-      id, resp_mac = autils.attach_with_identity(resp_dut)
-      resp_ids.append(id)
+        # Initiator+Responder: attach and wait for confirmation & identity
+        # create N+M sessions to be used in the different (but identical) NDPs
+        for i in range(N + M):
+            id, init_mac = autils.attach_with_identity(init_dut)
+            init_ids.append(id)
+            id, resp_mac = autils.attach_with_identity(resp_dut)
+            resp_ids.append(id)
 
-    # wait for for devices to synchronize with each other - there are no other
-    # mechanisms to make sure this happens for OOB discovery (except retrying
-    # to execute the data-path request)
-    time.sleep(autils.WAIT_FOR_CLUSTER)
+        # wait for devices to synchronize with each other - there are no other
+        # mechanisms to make sure this happens for OOB discovery (except retrying
+        # to execute the data-path request)
+        time.sleep(autils.WAIT_FOR_CLUSTER)
 
-    resp_req_keys = []
-    init_req_keys = []
-    resp_aware_ifs = []
-    init_aware_ifs = []
+        resp_req_keys = []
+        init_req_keys = []
+        resp_aware_ifs = []
+        init_aware_ifs = []
 
-    # issue N quick requests for identical NDPs - without waiting for result
-    # tests whether pre-setup multiple NDP procedure
-    for i in range(N):
-      # Responder: request network
-      resp_req_keys.append(autils.request_network(
-          resp_dut,
-          resp_dut.droid.wifiAwareCreateNetworkSpecifierOob(
-              resp_ids[i], aconsts.DATA_PATH_RESPONDER, init_mac, None)))
+        # issue N quick requests for identical NDPs - without waiting for result
+        # tests whether pre-setup multiple NDP procedure
+        for i in range(N):
+            # Responder: request network
+            resp_req_keys.append(
+                autils.request_network(
+                    resp_dut,
+                    resp_dut.droid.wifiAwareCreateNetworkSpecifierOob(
+                        resp_ids[i], aconsts.DATA_PATH_RESPONDER, init_mac,
+                        None)))
 
-      # Initiator: request network
-      init_req_keys.append(autils.request_network(
-          init_dut,
-          init_dut.droid.wifiAwareCreateNetworkSpecifierOob(
-              init_ids[i], aconsts.DATA_PATH_INITIATOR, resp_mac, None)))
+            # Initiator: request network
+            init_req_keys.append(
+                autils.request_network(
+                    init_dut,
+                    init_dut.droid.wifiAwareCreateNetworkSpecifierOob(
+                        init_ids[i], aconsts.DATA_PATH_INITIATOR, resp_mac,
+                        None)))
 
-    # remove the first request (hopefully before completed) testing that NDP
-    # is still created
-    resp_dut.droid.connectivityUnregisterNetworkCallback(resp_req_keys[0])
-    resp_req_keys.remove(resp_req_keys[0])
-    init_dut.droid.connectivityUnregisterNetworkCallback(init_req_keys[0])
-    init_req_keys.remove(init_req_keys[0])
+        # remove the first request (hopefully before completed) testing that NDP
+        # is still created
+        resp_dut.droid.connectivityUnregisterNetworkCallback(resp_req_keys[0])
+        resp_req_keys.remove(resp_req_keys[0])
+        init_dut.droid.connectivityUnregisterNetworkCallback(init_req_keys[0])
+        init_req_keys.remove(init_req_keys[0])
 
-    # wait for network formation for all initial requests
-    self.wait_for_request_responses(resp_dut, resp_req_keys, resp_aware_ifs)
-    self.wait_for_request_responses(init_dut, init_req_keys, init_aware_ifs)
+        # wait for network formation for all initial requests
+        self.wait_for_request_responses(resp_dut, resp_req_keys,
+                                        resp_aware_ifs)
+        self.wait_for_request_responses(init_dut, init_req_keys,
+                                        init_aware_ifs)
 
-    # issue M more requests for the same NDPs - tests post-setup multiple NDP
-    for i in range(M):
-      # Responder: request network
-      resp_req_keys.append(autils.request_network(
-          resp_dut,
-          resp_dut.droid.wifiAwareCreateNetworkSpecifierOob(
-              resp_ids[N + i], aconsts.DATA_PATH_RESPONDER, init_mac, None)))
+        # issue M more requests for the same NDPs - tests post-setup multiple NDP
+        for i in range(M):
+            # Responder: request network
+            resp_req_keys.append(
+                autils.request_network(
+                    resp_dut,
+                    resp_dut.droid.wifiAwareCreateNetworkSpecifierOob(
+                        resp_ids[N + i], aconsts.DATA_PATH_RESPONDER, init_mac,
+                        None)))
 
-      # Initiator: request network
-      init_req_keys.append(autils.request_network(
-          init_dut,
-          init_dut.droid.wifiAwareCreateNetworkSpecifierOob(
-              init_ids[N + i], aconsts.DATA_PATH_INITIATOR, resp_mac, None)))
+            # Initiator: request network
+            init_req_keys.append(
+                autils.request_network(
+                    init_dut,
+                    init_dut.droid.wifiAwareCreateNetworkSpecifierOob(
+                        init_ids[N + i], aconsts.DATA_PATH_INITIATOR, resp_mac,
+                        None)))
 
-    # wait for network formation for all subsequent requests
-    self.wait_for_request_responses(resp_dut, resp_req_keys[N - 1:],
-                                    resp_aware_ifs)
-    self.wait_for_request_responses(init_dut, init_req_keys[N - 1:],
-                                    init_aware_ifs)
+        # wait for network formation for all subsequent requests
+        self.wait_for_request_responses(resp_dut, resp_req_keys[N - 1:],
+                                        resp_aware_ifs)
+        self.wait_for_request_responses(init_dut, init_req_keys[N - 1:],
+                                        init_aware_ifs)
 
-    # determine whether all interfaces are identical (single NDP) - can't really
-    # test the IPv6 address since it is not part of the callback event - it is
-    # simply obtained from the system (so we'll always get the same for the same
-    # interface)
-    init_aware_ifs = list(set(init_aware_ifs))
-    resp_aware_ifs = list(set(resp_aware_ifs))
+        # determine whether all interfaces are identical (single NDP) - can't really
+        # test the IPv6 address since it is not part of the callback event - it is
+        # simply obtained from the system (so we'll always get the same for the same
+        # interface)
+        init_aware_ifs = list(set(init_aware_ifs))
+        resp_aware_ifs = list(set(resp_aware_ifs))
 
-    self.log.info("Interface names: I=%s, R=%s", init_aware_ifs, resp_aware_ifs)
-    self.log.info("Initiator requests: %s", init_req_keys)
-    self.log.info("Responder requests: %s", resp_req_keys)
+        self.log.info("Interface names: I=%s, R=%s", init_aware_ifs,
+                      resp_aware_ifs)
+        self.log.info("Initiator requests: %s", init_req_keys)
+        self.log.info("Responder requests: %s", resp_req_keys)
 
-    asserts.assert_equal(
-        len(init_aware_ifs), 1, "Multiple initiator interfaces")
-    asserts.assert_equal(
-        len(resp_aware_ifs), 1, "Multiple responder interfaces")
+        asserts.assert_equal(
+            len(init_aware_ifs), 1, "Multiple initiator interfaces")
+        asserts.assert_equal(
+            len(resp_aware_ifs), 1, "Multiple responder interfaces")
 
-    self.log.info("Interface IPv6 (using ifconfig): I=%s, R=%s",
-                  autils.get_ipv6_addr(init_dut, init_aware_ifs[0]),
-                  autils.get_ipv6_addr(resp_dut, resp_aware_ifs[0]))
+        self.log.info("Interface IPv6 (using ifconfig): I=%s, R=%s",
+                      autils.get_ipv6_addr(init_dut, init_aware_ifs[0]),
+                      autils.get_ipv6_addr(resp_dut, resp_aware_ifs[0]))
 
-    for i in range(init_dut.aware_capabilities[aconsts.CAP_MAX_NDI_INTERFACES]):
-      if_name = "%s%d" % (aconsts.AWARE_NDI_PREFIX, i)
-      init_ipv6 = autils.get_ipv6_addr(init_dut, if_name)
-      resp_ipv6 = autils.get_ipv6_addr(resp_dut, if_name)
+        for i in range(
+                init_dut.aware_capabilities[aconsts.CAP_MAX_NDI_INTERFACES]):
+            if_name = "%s%d" % (aconsts.AWARE_NDI_PREFIX, i)
+            init_ipv6 = autils.get_ipv6_addr(init_dut, if_name)
+            resp_ipv6 = autils.get_ipv6_addr(resp_dut, if_name)
 
-      asserts.assert_equal(
-          init_ipv6 is None, if_name not in init_aware_ifs,
-          "Initiator interface %s in unexpected state" % if_name)
-      asserts.assert_equal(
-          resp_ipv6 is None, if_name not in resp_aware_ifs,
-          "Responder interface %s in unexpected state" % if_name)
+            asserts.assert_equal(
+                init_ipv6 is None, if_name not in init_aware_ifs,
+                "Initiator interface %s in unexpected state" % if_name)
+            asserts.assert_equal(
+                resp_ipv6 is None, if_name not in resp_aware_ifs,
+                "Responder interface %s in unexpected state" % if_name)
 
-    # release requests
-    for resp_req_key in resp_req_keys:
-      resp_dut.droid.connectivityUnregisterNetworkCallback(resp_req_key)
-    for init_req_key in init_req_keys:
-      init_dut.droid.connectivityUnregisterNetworkCallback(init_req_key)
+        # release requests
+        for resp_req_key in resp_req_keys:
+            resp_dut.droid.connectivityUnregisterNetworkCallback(resp_req_key)
+        for init_req_key in init_req_keys:
+            init_dut.droid.connectivityUnregisterNetworkCallback(init_req_key)
 
-  def test_identical_network_from_both_sides(self):
-    """Validate that requesting two identical NDPs (Open) each being initiated
+    def test_identical_network_from_both_sides(self):
+        """Validate that requesting two identical NDPs (Open) each being initiated
     from a different side, results in the same/single NDP.
 
     Verify that the interface and IPv6 address is the same for all networks.
     """
-    dut1 = self.android_devices[0]
-    dut2 = self.android_devices[1]
+        dut1 = self.android_devices[0]
+        dut2 = self.android_devices[1]
 
-    id1, mac1 = autils.attach_with_identity(dut1)
-    id2, mac2 = autils.attach_with_identity(dut2)
+        id1, mac1 = autils.attach_with_identity(dut1)
+        id2, mac2 = autils.attach_with_identity(dut2)
 
-    # wait for for devices to synchronize with each other - there are no other
-    # mechanisms to make sure this happens for OOB discovery (except retrying
-    # to execute the data-path request)
-    time.sleep(autils.WAIT_FOR_CLUSTER)
+        # wait for devices to synchronize with each other - there are no other
+        # mechanisms to make sure this happens for OOB discovery (except retrying
+        # to execute the data-path request)
+        time.sleep(autils.WAIT_FOR_CLUSTER)
 
-    # first NDP: DUT1 (Init) -> DUT2 (Resp)
-    req_a_resp = autils.request_network(dut2,
-                               dut2.droid.wifiAwareCreateNetworkSpecifierOob(
-                                 id2, aconsts.DATA_PATH_RESPONDER,
-                                 mac1))
+        # first NDP: DUT1 (Init) -> DUT2 (Resp)
+        req_a_resp = autils.request_network(
+            dut2,
+            dut2.droid.wifiAwareCreateNetworkSpecifierOob(
+                id2, aconsts.DATA_PATH_RESPONDER, mac1))
 
-    req_a_init = autils.request_network(dut1,
-                               dut1.droid.wifiAwareCreateNetworkSpecifierOob(
-                                 id1, aconsts.DATA_PATH_INITIATOR,
-                                 mac2))
+        req_a_init = autils.request_network(
+            dut1,
+            dut1.droid.wifiAwareCreateNetworkSpecifierOob(
+                id1, aconsts.DATA_PATH_INITIATOR, mac2))
 
-    req_a_resp_event = autils.wait_for_event_with_keys(
-        dut2, cconsts.EVENT_NETWORK_CALLBACK,
-        autils.EVENT_NDP_TIMEOUT,
-        (cconsts.NETWORK_CB_KEY_EVENT,
-         cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-        (cconsts.NETWORK_CB_KEY_ID, req_a_resp))
-    req_a_init_event = autils.wait_for_event_with_keys(
-        dut1, cconsts.EVENT_NETWORK_CALLBACK,
-        autils.EVENT_NDP_TIMEOUT,
-        (cconsts.NETWORK_CB_KEY_EVENT,
-         cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-        (cconsts.NETWORK_CB_KEY_ID, req_a_init))
+        req_a_resp_event = autils.wait_for_event_with_keys(
+            dut2, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
+            (cconsts.NETWORK_CB_KEY_EVENT,
+             cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+            (cconsts.NETWORK_CB_KEY_ID, req_a_resp))
+        req_a_init_event = autils.wait_for_event_with_keys(
+            dut1, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
+            (cconsts.NETWORK_CB_KEY_EVENT,
+             cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+            (cconsts.NETWORK_CB_KEY_ID, req_a_init))
 
-    req_a_if_resp = req_a_resp_event["data"][
-      cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
-    req_a_if_init = req_a_init_event["data"][
-      cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
-    self.log.info("Interface names for A: I=%s, R=%s", req_a_if_init,
-                  req_a_if_resp)
+        req_a_if_resp = req_a_resp_event["data"][
+            cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+        req_a_if_init = req_a_init_event["data"][
+            cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+        self.log.info("Interface names for A: I=%s, R=%s", req_a_if_init,
+                      req_a_if_resp)
 
-    req_a_ipv6_resp = \
-    dut2.droid.connectivityGetLinkLocalIpv6Address(req_a_if_resp).split("%")[0]
-    req_a_ipv6_init = \
-    dut1.droid.connectivityGetLinkLocalIpv6Address(req_a_if_init).split("%")[0]
-    self.log.info("Interface addresses (IPv6) for A: I=%s, R=%s",
-                  req_a_ipv6_init, req_a_ipv6_resp)
+        req_a_ipv6_resp = \
+        dut2.droid.connectivityGetLinkLocalIpv6Address(req_a_if_resp).split("%")[0]
+        req_a_ipv6_init = \
+        dut1.droid.connectivityGetLinkLocalIpv6Address(req_a_if_init).split("%")[0]
+        self.log.info("Interface addresses (IPv6) for A: I=%s, R=%s",
+                      req_a_ipv6_init, req_a_ipv6_resp)
 
-    # second NDP: DUT2 (Init) -> DUT1 (Resp)
-    req_b_resp = autils.request_network(dut1,
-                                dut1.droid.wifiAwareCreateNetworkSpecifierOob(
-                                    id1, aconsts.DATA_PATH_RESPONDER,
-                                    mac2))
+        # second NDP: DUT2 (Init) -> DUT1 (Resp)
+        req_b_resp = autils.request_network(
+            dut1,
+            dut1.droid.wifiAwareCreateNetworkSpecifierOob(
+                id1, aconsts.DATA_PATH_RESPONDER, mac2))
 
-    req_b_init = autils.request_network(dut2,
-                                dut2.droid.wifiAwareCreateNetworkSpecifierOob(
-                                    id2, aconsts.DATA_PATH_INITIATOR,
-                                    mac1))
+        req_b_init = autils.request_network(
+            dut2,
+            dut2.droid.wifiAwareCreateNetworkSpecifierOob(
+                id2, aconsts.DATA_PATH_INITIATOR, mac1))
 
-    req_b_resp_event = autils.wait_for_event_with_keys(
-        dut1, cconsts.EVENT_NETWORK_CALLBACK,
-        autils.EVENT_NDP_TIMEOUT,
-        (cconsts.NETWORK_CB_KEY_EVENT,
-         cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-        (cconsts.NETWORK_CB_KEY_ID, req_b_resp))
-    req_b_init_event = autils.wait_for_event_with_keys(
-        dut2, cconsts.EVENT_NETWORK_CALLBACK,
-        autils.EVENT_NDP_TIMEOUT,
-        (cconsts.NETWORK_CB_KEY_EVENT,
-         cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-        (cconsts.NETWORK_CB_KEY_ID, req_b_init))
+        req_b_resp_event = autils.wait_for_event_with_keys(
+            dut1, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
+            (cconsts.NETWORK_CB_KEY_EVENT,
+             cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+            (cconsts.NETWORK_CB_KEY_ID, req_b_resp))
+        req_b_init_event = autils.wait_for_event_with_keys(
+            dut2, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
+            (cconsts.NETWORK_CB_KEY_EVENT,
+             cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+            (cconsts.NETWORK_CB_KEY_ID, req_b_init))
 
-    req_b_if_resp = req_b_resp_event["data"][
-      cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
-    req_b_if_init = req_b_init_event["data"][
-      cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
-    self.log.info("Interface names for B: I=%s, R=%s", req_b_if_init,
-                  req_b_if_resp)
+        req_b_if_resp = req_b_resp_event["data"][
+            cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+        req_b_if_init = req_b_init_event["data"][
+            cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+        self.log.info("Interface names for B: I=%s, R=%s", req_b_if_init,
+                      req_b_if_resp)
 
-    req_b_ipv6_resp = \
-      dut1.droid.connectivityGetLinkLocalIpv6Address(req_b_if_resp).split("%")[0]
-    req_b_ipv6_init = \
-      dut2.droid.connectivityGetLinkLocalIpv6Address(req_b_if_init).split("%")[0]
-    self.log.info("Interface addresses (IPv6) for B: I=%s, R=%s",
-                  req_b_ipv6_init, req_b_ipv6_resp)
+        req_b_ipv6_resp = \
+          dut1.droid.connectivityGetLinkLocalIpv6Address(req_b_if_resp).split("%")[0]
+        req_b_ipv6_init = \
+          dut2.droid.connectivityGetLinkLocalIpv6Address(req_b_if_init).split("%")[0]
+        self.log.info("Interface addresses (IPv6) for B: I=%s, R=%s",
+                      req_b_ipv6_init, req_b_ipv6_resp)
 
-    # validate equality of NDPs (using interface names & ipv6)
-    asserts.assert_equal(req_a_if_init, req_b_if_resp,
-                         "DUT1 NDPs are on different interfaces")
-    asserts.assert_equal(req_a_if_resp, req_b_if_init,
-                         "DUT2 NDPs are on different interfaces")
-    asserts.assert_equal(req_a_ipv6_init, req_b_ipv6_resp,
-                         "DUT1 NDPs are using different IPv6 addresses")
-    asserts.assert_equal(req_a_ipv6_resp, req_b_ipv6_init,
-                         "DUT2 NDPs are using different IPv6 addresses")
+        # validate equality of NDPs (using interface names & ipv6)
+        asserts.assert_equal(req_a_if_init, req_b_if_resp,
+                             "DUT1 NDPs are on different interfaces")
+        asserts.assert_equal(req_a_if_resp, req_b_if_init,
+                             "DUT2 NDPs are on different interfaces")
+        asserts.assert_equal(req_a_ipv6_init, req_b_ipv6_resp,
+                             "DUT1 NDPs are using different IPv6 addresses")
+        asserts.assert_equal(req_a_ipv6_resp, req_b_ipv6_init,
+                             "DUT2 NDPs are using different IPv6 addresses")
 
-    # release requests
-    dut1.droid.connectivityUnregisterNetworkCallback(req_a_init)
-    dut1.droid.connectivityUnregisterNetworkCallback(req_b_resp)
-    dut2.droid.connectivityUnregisterNetworkCallback(req_a_resp)
-    dut2.droid.connectivityUnregisterNetworkCallback(req_b_init)
+        # release requests
+        dut1.droid.connectivityUnregisterNetworkCallback(req_a_init)
+        dut1.droid.connectivityUnregisterNetworkCallback(req_b_resp)
+        dut2.droid.connectivityUnregisterNetworkCallback(req_a_resp)
+        dut2.droid.connectivityUnregisterNetworkCallback(req_b_init)
 
-  ########################################################################
+    ########################################################################
 
-  def run_multiple_ndi(self, sec_configs, flip_init_resp=False):
-    """Validate that the device can create and use multiple NDIs.
+    def run_multiple_ndi(self, sec_configs, flip_init_resp=False):
+        """Validate that the device can create and use multiple NDIs.
 
     The security configuration can be:
     - None: open
@@ -1214,281 +1258,279 @@
                       between the 2 devices, otherwise same devices are always
                       configured in the same role.
     """
-    dut1 = self.android_devices[0]
-    dut2 = self.android_devices[1]
+        dut1 = self.android_devices[0]
+        dut2 = self.android_devices[1]
 
-    asserts.skip_if(dut1.aware_capabilities[aconsts.CAP_MAX_NDI_INTERFACES]
-                    < len(sec_configs) or
-                    dut2.aware_capabilities[aconsts.CAP_MAX_NDI_INTERFACES]
-                    < len(sec_configs),
-                    "DUTs do not support enough NDIs")
+        asserts.skip_if(
+            dut1.aware_capabilities[aconsts.CAP_MAX_NDI_INTERFACES] <
+            len(sec_configs)
+            or dut2.aware_capabilities[aconsts.CAP_MAX_NDI_INTERFACES] <
+            len(sec_configs), "DUTs do not support enough NDIs")
 
-    id1, mac1 = autils.attach_with_identity(dut1)
-    id2, mac2 = autils.attach_with_identity(dut2)
+        id1, mac1 = autils.attach_with_identity(dut1)
+        id2, mac2 = autils.attach_with_identity(dut2)
 
-    # wait for for devices to synchronize with each other - there are no other
-    # mechanisms to make sure this happens for OOB discovery (except retrying
-    # to execute the data-path request)
-    time.sleep(autils.WAIT_FOR_CLUSTER)
+        # wait for devices to synchronize with each other - there are no other
+        # mechanisms to make sure this happens for OOB discovery (except retrying
+        # to execute the data-path request)
+        time.sleep(autils.WAIT_FOR_CLUSTER)
 
-    dut2_req_keys = []
-    dut1_req_keys = []
-    dut2_aware_ifs = []
-    dut1_aware_ifs = []
-    dut2_aware_ipv6 = []
-    dut1_aware_ipv6 = []
+        dut2_req_keys = []
+        dut1_req_keys = []
+        dut2_aware_ifs = []
+        dut1_aware_ifs = []
+        dut2_aware_ipv6 = []
+        dut1_aware_ipv6 = []
 
-    dut2_type = aconsts.DATA_PATH_RESPONDER
-    dut1_type = aconsts.DATA_PATH_INITIATOR
-    dut2_is_responder = True
-    for sec in sec_configs:
-      if dut2_is_responder:
-        # DUT2 (Responder): request network
-        dut2_req_key = autils.request_network(dut2,
-                                              autils.get_network_specifier(
-                                                  dut2, id2,
-                                                  dut2_type,
-                                                  mac1, sec))
-        dut2_req_keys.append(dut2_req_key)
+        dut2_type = aconsts.DATA_PATH_RESPONDER
+        dut1_type = aconsts.DATA_PATH_INITIATOR
+        dut2_is_responder = True
+        for sec in sec_configs:
+            if dut2_is_responder:
+                # DUT2 (Responder): request network
+                dut2_req_key = autils.request_network(
+                    dut2,
+                    autils.get_network_specifier(dut2, id2, dut2_type, mac1,
+                                                 sec))
+                dut2_req_keys.append(dut2_req_key)
 
-        # DUT1 (Initiator): request network
-        dut1_req_key = autils.request_network(dut1,
-                                              autils.get_network_specifier(
-                                                  dut1, id1,
-                                                  dut1_type,
-                                                  mac2, sec))
-        dut1_req_keys.append(dut1_req_key)
-      else:
-        # DUT1 (Responder): request network
-        dut1_req_key = autils.request_network(dut1,
-                                              autils.get_network_specifier(
-                                                  dut1, id1,
-                                                  dut1_type,
-                                                  mac2, sec))
-        dut1_req_keys.append(dut1_req_key)
+                # DUT1 (Initiator): request network
+                dut1_req_key = autils.request_network(
+                    dut1,
+                    autils.get_network_specifier(dut1, id1, dut1_type, mac2,
+                                                 sec))
+                dut1_req_keys.append(dut1_req_key)
+            else:
+                # DUT1 (Responder): request network
+                dut1_req_key = autils.request_network(
+                    dut1,
+                    autils.get_network_specifier(dut1, id1, dut1_type, mac2,
+                                                 sec))
+                dut1_req_keys.append(dut1_req_key)
 
-        # DUT2 (Initiator): request network
-        dut2_req_key = autils.request_network(dut2,
-                                              autils.get_network_specifier(
-                                                  dut2, id2,
-                                                  dut2_type,
-                                                  mac1, sec))
-        dut2_req_keys.append(dut2_req_key)
+                # DUT2 (Initiator): request network
+                dut2_req_key = autils.request_network(
+                    dut2,
+                    autils.get_network_specifier(dut2, id2, dut2_type, mac1,
+                                                 sec))
+                dut2_req_keys.append(dut2_req_key)
 
-      # Wait for network
-      dut1_net_event = autils.wait_for_event_with_keys(
-          dut1, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
-          (cconsts.NETWORK_CB_KEY_EVENT,
-           cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-          (cconsts.NETWORK_CB_KEY_ID, dut1_req_key))
-      dut2_net_event = autils.wait_for_event_with_keys(
-          dut2, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
-          (cconsts.NETWORK_CB_KEY_EVENT,
-           cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-          (cconsts.NETWORK_CB_KEY_ID, dut2_req_key))
+            # Wait for network
+            dut1_net_event = autils.wait_for_event_with_keys(
+                dut1, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
+                (cconsts.NETWORK_CB_KEY_EVENT,
+                 cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+                (cconsts.NETWORK_CB_KEY_ID, dut1_req_key))
+            dut2_net_event = autils.wait_for_event_with_keys(
+                dut2, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
+                (cconsts.NETWORK_CB_KEY_EVENT,
+                 cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+                (cconsts.NETWORK_CB_KEY_ID, dut2_req_key))
 
-      dut2_aware_if = dut2_net_event["data"][
-        cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
-      dut1_aware_if = dut1_net_event["data"][
-        cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
-      dut2_aware_ifs.append(dut2_aware_if)
-      dut1_aware_ifs.append(dut1_aware_if)
-      dut2_aware_ipv6.append(autils.get_ipv6_addr(dut2, dut2_aware_if))
-      dut1_aware_ipv6.append(autils.get_ipv6_addr(dut1, dut1_aware_if))
+            dut2_aware_if = dut2_net_event["data"][
+                cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+            dut1_aware_if = dut1_net_event["data"][
+                cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+            dut2_aware_ifs.append(dut2_aware_if)
+            dut1_aware_ifs.append(dut1_aware_if)
+            dut2_aware_ipv6.append(autils.get_ipv6_addr(dut2, dut2_aware_if))
+            dut1_aware_ipv6.append(autils.get_ipv6_addr(dut1, dut1_aware_if))
 
-      if flip_init_resp:
-        if dut2_is_responder:
-          dut2_type = aconsts.DATA_PATH_INITIATOR
-          dut1_type = aconsts.DATA_PATH_RESPONDER
-        else:
-          dut2_type = aconsts.DATA_PATH_RESPONDER
-          dut1_type = aconsts.DATA_PATH_INITIATOR
-        dut2_is_responder = not dut2_is_responder
+            if flip_init_resp:
+                if dut2_is_responder:
+                    dut2_type = aconsts.DATA_PATH_INITIATOR
+                    dut1_type = aconsts.DATA_PATH_RESPONDER
+                else:
+                    dut2_type = aconsts.DATA_PATH_RESPONDER
+                    dut1_type = aconsts.DATA_PATH_INITIATOR
+                dut2_is_responder = not dut2_is_responder
 
-    # check that we are using 2 NDIs & that they have unique IPv6 addresses
-    dut1_aware_ifs = list(set(dut1_aware_ifs))
-    dut2_aware_ifs = list(set(dut2_aware_ifs))
-    dut1_aware_ipv6 = list(set(dut1_aware_ipv6))
-    dut2_aware_ipv6 = list(set(dut2_aware_ipv6))
+        # check that we are using 2 NDIs & that they have unique IPv6 addresses
+        dut1_aware_ifs = list(set(dut1_aware_ifs))
+        dut2_aware_ifs = list(set(dut2_aware_ifs))
+        dut1_aware_ipv6 = list(set(dut1_aware_ipv6))
+        dut2_aware_ipv6 = list(set(dut2_aware_ipv6))
 
-    self.log.info("Interface names: DUT1=%s, DUT2=%s", dut1_aware_ifs,
-                  dut2_aware_ifs)
-    self.log.info("IPv6 addresses: DUT1=%s, DUT2=%s", dut1_aware_ipv6,
-                  dut2_aware_ipv6)
-    self.log.info("DUT1 requests: %s", dut1_req_keys)
-    self.log.info("DUT2 requests: %s", dut2_req_keys)
+        self.log.info("Interface names: DUT1=%s, DUT2=%s", dut1_aware_ifs,
+                      dut2_aware_ifs)
+        self.log.info("IPv6 addresses: DUT1=%s, DUT2=%s", dut1_aware_ipv6,
+                      dut2_aware_ipv6)
+        self.log.info("DUT1 requests: %s", dut1_req_keys)
+        self.log.info("DUT2 requests: %s", dut2_req_keys)
 
-    asserts.assert_equal(
-        len(dut1_aware_ifs), len(sec_configs), "Multiple DUT1 interfaces")
-    asserts.assert_equal(
-        len(dut2_aware_ifs), len(sec_configs), "Multiple DUT2 interfaces")
-    asserts.assert_equal(
-        len(dut1_aware_ipv6), len(sec_configs), "Multiple DUT1 IPv6 addresses")
-    asserts.assert_equal(
-        len(dut2_aware_ipv6), len(sec_configs), "Multiple DUT2 IPv6 addresses")
+        asserts.assert_equal(
+            len(dut1_aware_ifs), len(sec_configs), "Multiple DUT1 interfaces")
+        asserts.assert_equal(
+            len(dut2_aware_ifs), len(sec_configs), "Multiple DUT2 interfaces")
+        asserts.assert_equal(
+            len(dut1_aware_ipv6), len(sec_configs),
+            "Multiple DUT1 IPv6 addresses")
+        asserts.assert_equal(
+            len(dut2_aware_ipv6), len(sec_configs),
+            "Multiple DUT2 IPv6 addresses")
 
-    for i in range(len(sec_configs)):
-      if_name = "%s%d" % (aconsts.AWARE_NDI_PREFIX, i)
-      dut1_ipv6 = autils.get_ipv6_addr(dut1, if_name)
-      dut2_ipv6 = autils.get_ipv6_addr(dut2, if_name)
+        for i in range(len(sec_configs)):
+            if_name = "%s%d" % (aconsts.AWARE_NDI_PREFIX, i)
+            dut1_ipv6 = autils.get_ipv6_addr(dut1, if_name)
+            dut2_ipv6 = autils.get_ipv6_addr(dut2, if_name)
 
-      asserts.assert_equal(
-          dut1_ipv6 is None, if_name not in dut1_aware_ifs,
-          "DUT1 interface %s in unexpected state" % if_name)
-      asserts.assert_equal(
-          dut2_ipv6 is None, if_name not in dut2_aware_ifs,
-          "DUT2 interface %s in unexpected state" % if_name)
+            asserts.assert_equal(
+                dut1_ipv6 is None, if_name not in dut1_aware_ifs,
+                "DUT1 interface %s in unexpected state" % if_name)
+            asserts.assert_equal(
+                dut2_ipv6 is None, if_name not in dut2_aware_ifs,
+                "DUT2 interface %s in unexpected state" % if_name)
 
-    # release requests
-    for dut2_req_key in dut2_req_keys:
-      dut2.droid.connectivityUnregisterNetworkCallback(dut2_req_key)
-    for dut1_req_key in dut1_req_keys:
-      dut1.droid.connectivityUnregisterNetworkCallback(dut1_req_key)
+        # release requests
+        for dut2_req_key in dut2_req_keys:
+            dut2.droid.connectivityUnregisterNetworkCallback(dut2_req_key)
+        for dut1_req_key in dut1_req_keys:
+            dut1.droid.connectivityUnregisterNetworkCallback(dut1_req_key)
 
-  @test_tracker_info(uuid="2d728163-11cc-46ba-a973-c8e1e71397fc")
-  def test_multiple_ndi_open_passphrase(self):
-    """Verify that between 2 DUTs can create 2 NDPs with different security
+    @test_tracker_info(uuid="2d728163-11cc-46ba-a973-c8e1e71397fc")
+    def test_multiple_ndi_open_passphrase(self):
+        """Verify that between 2 DUTs can create 2 NDPs with different security
     configuration (one open, one using passphrase). The result should use two
     different NDIs"""
-    self.run_multiple_ndi([None, self.PASSPHRASE])
+        self.run_multiple_ndi([None, self.PASSPHRASE])
 
-  @test_tracker_info(uuid="5f2c32aa-20b2-41f0-8b1e-d0b68df73ada")
-  def test_multiple_ndi_open_pmk(self):
-    """Verify that between 2 DUTs can create 2 NDPs with different security
+    @test_tracker_info(uuid="5f2c32aa-20b2-41f0-8b1e-d0b68df73ada")
+    def test_multiple_ndi_open_pmk(self):
+        """Verify that between 2 DUTs can create 2 NDPs with different security
     configuration (one open, one using pmk). The result should use two
     different NDIs"""
-    self.run_multiple_ndi([None, self.PMK])
+        self.run_multiple_ndi([None, self.PMK])
 
-  @test_tracker_info(uuid="34467659-bcfb-40cd-ba25-7e50560fca63")
-  def test_multiple_ndi_passphrase_pmk(self):
-    """Verify that between 2 DUTs can create 2 NDPs with different security
+    @test_tracker_info(uuid="34467659-bcfb-40cd-ba25-7e50560fca63")
+    def test_multiple_ndi_passphrase_pmk(self):
+        """Verify that between 2 DUTs can create 2 NDPs with different security
     configuration (one using passphrase, one using pmk). The result should use
     two different NDIs"""
-    self.run_multiple_ndi([self.PASSPHRASE, self.PMK])
+        self.run_multiple_ndi([self.PASSPHRASE, self.PMK])
 
-  @test_tracker_info(uuid="d9194ce6-45b6-41b1-9cc8-ada79968966d")
-  def test_multiple_ndi_passphrases(self):
-    """Verify that between 2 DUTs can create 2 NDPs with different security
+    @test_tracker_info(uuid="d9194ce6-45b6-41b1-9cc8-ada79968966d")
+    def test_multiple_ndi_passphrases(self):
+        """Verify that between 2 DUTs can create 2 NDPs with different security
     configuration (using different passphrases). The result should use two
     different NDIs"""
-    self.run_multiple_ndi([self.PASSPHRASE, self.PASSPHRASE2])
+        self.run_multiple_ndi([self.PASSPHRASE, self.PASSPHRASE2])
 
-  @test_tracker_info(uuid="879df795-62d2-40d4-a862-bd46d8f7e67f")
-  def test_multiple_ndi_pmks(self):
-    """Verify that between 2 DUTs can create 2 NDPs with different security
+    @test_tracker_info(uuid="879df795-62d2-40d4-a862-bd46d8f7e67f")
+    def test_multiple_ndi_pmks(self):
+        """Verify that between 2 DUTs can create 2 NDPs with different security
     configuration (using different PMKS). The result should use two different
     NDIs"""
-    self.run_multiple_ndi([self.PMK, self.PMK2])
+        self.run_multiple_ndi([self.PMK, self.PMK2])
 
-  @test_tracker_info(uuid="397d380a-8e41-466e-9ccb-cf8f413d83ba")
-  def test_multiple_ndi_open_passphrase_flip(self):
-    """Verify that between 2 DUTs can create 2 NDPs with different security
+    @test_tracker_info(uuid="397d380a-8e41-466e-9ccb-cf8f413d83ba")
+    def test_multiple_ndi_open_passphrase_flip(self):
+        """Verify that between 2 DUTs can create 2 NDPs with different security
     configuration (one open, one using passphrase). The result should use two
     different NDIs.
 
     Flip Initiator and Responder roles.
     """
-    self.run_multiple_ndi([None, self.PASSPHRASE], flip_init_resp=True)
+        self.run_multiple_ndi([None, self.PASSPHRASE], flip_init_resp=True)
 
-  @test_tracker_info(uuid="b3a4300b-1514-4cb8-a814-9c2baa449700")
-  def test_multiple_ndi_open_pmk_flip(self):
-    """Verify that between 2 DUTs can create 2 NDPs with different security
+    @test_tracker_info(uuid="b3a4300b-1514-4cb8-a814-9c2baa449700")
+    def test_multiple_ndi_open_pmk_flip(self):
+        """Verify that between 2 DUTs can create 2 NDPs with different security
     configuration (one open, one using pmk). The result should use two
     different NDIs
 
     Flip Initiator and Responder roles.
     """
-    self.run_multiple_ndi([None, self.PMK], flip_init_resp=True)
+        self.run_multiple_ndi([None, self.PMK], flip_init_resp=True)
 
-  @test_tracker_info(uuid="0bfea9e4-e57d-417f-8db4-245741e9bbd5")
-  def test_multiple_ndi_passphrase_pmk_flip(self):
-    """Verify that between 2 DUTs can create 2 NDPs with different security
+    @test_tracker_info(uuid="0bfea9e4-e57d-417f-8db4-245741e9bbd5")
+    def test_multiple_ndi_passphrase_pmk_flip(self):
+        """Verify that between 2 DUTs can create 2 NDPs with different security
     configuration (one using passphrase, one using pmk). The result should use
     two different NDIs
 
     Flip Initiator and Responder roles.
     """
-    self.run_multiple_ndi([self.PASSPHRASE, self.PMK], flip_init_resp=True)
+        self.run_multiple_ndi([self.PASSPHRASE, self.PMK], flip_init_resp=True)
 
-  @test_tracker_info(uuid="74023483-5417-431b-a362-991ad4a03ab8")
-  def test_multiple_ndi_passphrases_flip(self):
-    """Verify that between 2 DUTs can create 2 NDPs with different security
+    @test_tracker_info(uuid="74023483-5417-431b-a362-991ad4a03ab8")
+    def test_multiple_ndi_passphrases_flip(self):
+        """Verify that between 2 DUTs can create 2 NDPs with different security
     configuration (using different passphrases). The result should use two
     different NDIs
 
     Flip Initiator and Responder roles.
     """
-    self.run_multiple_ndi([self.PASSPHRASE, self.PASSPHRASE2],
-                          flip_init_resp=True)
+        self.run_multiple_ndi(
+            [self.PASSPHRASE, self.PASSPHRASE2], flip_init_resp=True)
 
-  @test_tracker_info(uuid="873b2d91-28a1-403f-ae9c-d756bb2f59ee")
-  def test_multiple_ndi_pmks_flip(self):
-    """Verify that between 2 DUTs can create 2 NDPs with different security
+    @test_tracker_info(uuid="873b2d91-28a1-403f-ae9c-d756bb2f59ee")
+    def test_multiple_ndi_pmks_flip(self):
+        """Verify that between 2 DUTs can create 2 NDPs with different security
     configuration (using different PMKS). The result should use two different
     NDIs
 
     Flip Initiator and Responder roles.
     """
-    self.run_multiple_ndi([self.PMK, self.PMK2], flip_init_resp=True)
+        self.run_multiple_ndi([self.PMK, self.PMK2], flip_init_resp=True)
 
-  #######################################
+    #######################################
 
-  @test_tracker_info(uuid="2f10a9df-7fbd-490d-a238-3523f47ab54c")
-  def test_ib_responder_any_usage(self):
-    """Verify that configuring an in-band (Aware discovery) Responder to receive
+    @test_tracker_info(uuid="2f10a9df-7fbd-490d-a238-3523f47ab54c")
+    def test_ib_responder_any_usage(self):
+        """Verify that configuring an in-band (Aware discovery) Responder to receive
     an NDP request from any peer is not permitted by current API level. Override
     API check to validate that possible (i.e. that failure at current API level
     is due to an API check and not some underlying failure).
     """
 
-    # configure all devices to override API check and allow a Responder from ANY
-    for ad in self.android_devices:
-      autils.configure_ndp_allow_any_override(ad, True)
-    self.run_ib_data_path_test(
-        ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
-        stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
-        encr_type=self.ENCR_TYPE_OPEN,
-        use_peer_id=False)
+        # configure all devices to override API check and allow a Responder from ANY
+        for ad in self.android_devices:
+            autils.configure_ndp_allow_any_override(ad, True)
+        self.run_ib_data_path_test(
+            ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
+            stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
+            encr_type=self.ENCR_TYPE_OPEN,
+            use_peer_id=False)
 
-    # configure all devices to respect API check - i.e. disallow a Responder
-    # from ANY
-    for ad in self.android_devices:
-      autils.configure_ndp_allow_any_override(ad, False)
-    self.run_ib_data_path_test(
-        ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
-        stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
-        encr_type=self.ENCR_TYPE_OPEN,
-        use_peer_id=False,
-        expect_failure=True)
+        # configure all devices to respect API check - i.e. disallow a Responder
+        # from ANY
+        for ad in self.android_devices:
+            autils.configure_ndp_allow_any_override(ad, False)
+        self.run_ib_data_path_test(
+            ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
+            stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
+            encr_type=self.ENCR_TYPE_OPEN,
+            use_peer_id=False,
+            expect_failure=True)
 
-  @test_tracker_info(uuid="5889cd41-0a72-4b7b-ab82-5b9168b9b5b8")
-  def test_oob_responder_any_usage(self):
-    """Verify that configuring an out-of-band (Aware discovery) Responder to
+    @test_tracker_info(uuid="5889cd41-0a72-4b7b-ab82-5b9168b9b5b8")
+    def test_oob_responder_any_usage(self):
+        """Verify that configuring an out-of-band (Aware discovery) Responder to
     receive an NDP request from any peer is not permitted by current API level.
     Override API check to validate that possible (i.e. that failure at current
     API level is due to an API check and not some underlying failure).
     """
 
-    # configure all devices to override API check and allow a Responder from ANY
-    for ad in self.android_devices:
-      autils.configure_ndp_allow_any_override(ad, True)
-    self.run_oob_data_path_test(
-        encr_type=self.ENCR_TYPE_OPEN,
-        use_peer_id=False)
+        # configure all devices to override API check and allow a Responder from ANY
+        for ad in self.android_devices:
+            autils.configure_ndp_allow_any_override(ad, True)
+        self.run_oob_data_path_test(
+            encr_type=self.ENCR_TYPE_OPEN, use_peer_id=False)
 
-    # configure all devices to respect API check - i.e. disallow a Responder
-    # from ANY
-    for ad in self.android_devices:
-      autils.configure_ndp_allow_any_override(ad, False)
-    self.run_oob_data_path_test(
-        encr_type=self.ENCR_TYPE_OPEN,
-        use_peer_id=False,
-        expect_failure=True)
+        # configure all devices to respect API check - i.e. disallow a Responder
+        # from ANY
+        for ad in self.android_devices:
+            autils.configure_ndp_allow_any_override(ad, False)
+        self.run_oob_data_path_test(
+            encr_type=self.ENCR_TYPE_OPEN,
+            use_peer_id=False,
+            expect_failure=True)
 
-  #######################################
+    #######################################
 
-  def run_multiple_regulatory_domains(self, use_ib, init_domain, resp_domain):
-    """Verify that a data-path setup with two conflicting regulatory domains
+    def run_multiple_regulatory_domains(self, use_ib, init_domain,
+                                        resp_domain):
+        """Verify that a data-path setup with two conflicting regulatory domains
     works (the result should be run in Channel 6 - but that is not tested).
 
     Args:
@@ -1496,86 +1538,86 @@
       init_domain: The regulatory domain of the Initiator/Subscriber.
       resp_domain: The regulator domain of the Responder/Publisher.
     """
-    init_dut = self.android_devices[0]
-    resp_dut = self.android_devices[1]
+        init_dut = self.android_devices[0]
+        resp_dut = self.android_devices[1]
 
-    init_dut.droid.wifiSetCountryCode(init_domain)
-    resp_dut.droid.wifiSetCountryCode(resp_domain)
+        init_dut.droid.wifiSetCountryCode(init_domain)
+        resp_dut.droid.wifiSetCountryCode(resp_domain)
 
-    if use_ib:
-      (resp_req_key, init_req_key, resp_aware_if, init_aware_if, resp_ipv6,
-       init_ipv6) = autils.create_ib_ndp(resp_dut, init_dut,
-                                         autils.create_discovery_config(
-                                           "GoogleTestXyz",
-                                           aconsts.PUBLISH_TYPE_UNSOLICITED),
-                                         autils.create_discovery_config(
-                                           "GoogleTestXyz",
-                                           aconsts.SUBSCRIBE_TYPE_PASSIVE),
-                                         self.device_startup_offset)
-    else:
-      (init_req_key, resp_req_key, init_aware_if, resp_aware_if, init_ipv6,
-       resp_ipv6) = autils.create_oob_ndp(init_dut, resp_dut)
+        if use_ib:
+            (resp_req_key, init_req_key, resp_aware_if, init_aware_if,
+             resp_ipv6, init_ipv6) = autils.create_ib_ndp(
+                 resp_dut, init_dut,
+                 autils.create_discovery_config(
+                     "GoogleTestXyz", aconsts.PUBLISH_TYPE_UNSOLICITED),
+                 autils.create_discovery_config(
+                     "GoogleTestXyz", aconsts.SUBSCRIBE_TYPE_PASSIVE),
+                 self.device_startup_offset)
+        else:
+            (init_req_key, resp_req_key, init_aware_if, resp_aware_if,
+             init_ipv6, resp_ipv6) = autils.create_oob_ndp(init_dut, resp_dut)
 
-    self.log.info("Interface names: I=%s, R=%s", init_aware_if, resp_aware_if)
-    self.log.info("Interface addresses (IPv6): I=%s, R=%s", init_ipv6,
-                  resp_ipv6)
+        self.log.info("Interface names: I=%s, R=%s", init_aware_if,
+                      resp_aware_if)
+        self.log.info("Interface addresses (IPv6): I=%s, R=%s", init_ipv6,
+                      resp_ipv6)
 
-    # clean-up
-    resp_dut.droid.connectivityUnregisterNetworkCallback(resp_req_key)
-    init_dut.droid.connectivityUnregisterNetworkCallback(init_req_key)
+        # clean-up
+        resp_dut.droid.connectivityUnregisterNetworkCallback(resp_req_key)
+        init_dut.droid.connectivityUnregisterNetworkCallback(init_req_key)
 
-  @test_tracker_info(uuid="eff53739-35c5-47a6-81f0-d70b51d89c3b")
-  def test_multiple_regulator_domains_ib_us_jp(self):
-    """Verify data-path setup across multiple regulator domains.
+    @test_tracker_info(uuid="eff53739-35c5-47a6-81f0-d70b51d89c3b")
+    def test_multiple_regulator_domains_ib_us_jp(self):
+        """Verify data-path setup across multiple regulator domains.
 
     - Uses in-band discovery
     - Subscriber=US, Publisher=JP
     """
-    self.run_multiple_regulatory_domains(
-        use_ib=True,
-        init_domain=wutils.WifiEnums.CountryCode.US,
-        resp_domain=wutils.WifiEnums.CountryCode.JAPAN)
+        self.run_multiple_regulatory_domains(
+            use_ib=True,
+            init_domain=wutils.WifiEnums.CountryCode.US,
+            resp_domain=wutils.WifiEnums.CountryCode.JAPAN)
 
-  @test_tracker_info(uuid="19af47cc-3204-40ef-b50f-14cf7b89cf4a")
-  def test_multiple_regulator_domains_ib_jp_us(self):
-    """Verify data-path setup across multiple regulator domains.
+    @test_tracker_info(uuid="19af47cc-3204-40ef-b50f-14cf7b89cf4a")
+    def test_multiple_regulator_domains_ib_jp_us(self):
+        """Verify data-path setup across multiple regulator domains.
 
     - Uses in-band discovery
     - Subscriber=JP, Publisher=US
     """
-    self.run_multiple_regulatory_domains(
-        use_ib=True,
-        init_domain=wutils.WifiEnums.CountryCode.JAPAN,
-        resp_domain=wutils.WifiEnums.CountryCode.US)
+        self.run_multiple_regulatory_domains(
+            use_ib=True,
+            init_domain=wutils.WifiEnums.CountryCode.JAPAN,
+            resp_domain=wutils.WifiEnums.CountryCode.US)
 
-  @test_tracker_info(uuid="65285ab3-977f-4dbd-b663-d5a02f4fc663")
-  def test_multiple_regulator_domains_oob_us_jp(self):
-    """Verify data-path setup across multiple regulator domains.
+    @test_tracker_info(uuid="65285ab3-977f-4dbd-b663-d5a02f4fc663")
+    def test_multiple_regulator_domains_oob_us_jp(self):
+        """Verify data-path setup across multiple regulator domains.
 
     - Uses out-f-band discovery
     - Initiator=US, Responder=JP
     """
-    self.run_multiple_regulatory_domains(
-        use_ib=False,
-        init_domain=wutils.WifiEnums.CountryCode.US,
-        resp_domain=wutils.WifiEnums.CountryCode.JAPAN)
+        self.run_multiple_regulatory_domains(
+            use_ib=False,
+            init_domain=wutils.WifiEnums.CountryCode.US,
+            resp_domain=wutils.WifiEnums.CountryCode.JAPAN)
 
-  @test_tracker_info(uuid="8a417e24-aaf6-44b9-a089-a07c3ba8d954")
-  def test_multiple_regulator_domains_oob_jp_us(self):
-    """Verify data-path setup across multiple regulator domains.
+    @test_tracker_info(uuid="8a417e24-aaf6-44b9-a089-a07c3ba8d954")
+    def test_multiple_regulator_domains_oob_jp_us(self):
+        """Verify data-path setup across multiple regulator domains.
 
     - Uses out-of-band discovery
     - Initiator=JP, Responder=US
     """
-    self.run_multiple_regulatory_domains(
-        use_ib=False,
-        init_domain=wutils.WifiEnums.CountryCode.JAPAN,
-        resp_domain=wutils.WifiEnums.CountryCode.US)
+        self.run_multiple_regulatory_domains(
+            use_ib=False,
+            init_domain=wutils.WifiEnums.CountryCode.JAPAN,
+            resp_domain=wutils.WifiEnums.CountryCode.US)
 
-  ########################################################################
+    ########################################################################
 
-  def run_mix_ib_oob(self, same_request, ib_first, inits_on_same_dut):
-    """Validate that multiple network requests issued using both in-band and
+    def run_mix_ib_oob(self, same_request, ib_first, inits_on_same_dut):
+        """Validate that multiple network requests issued using both in-band and
     out-of-band discovery behave as expected.
 
     The same_request parameter controls whether identical single NDP is
@@ -1591,275 +1633,273 @@
                          otherwise (if False) then the Initiators are run on
                          different devices. Note that Subscribe == Initiator.
     """
-    if not same_request:
-      asserts.skip_if(self.android_devices[0].aware_capabilities[
-                        aconsts.CAP_MAX_NDI_INTERFACES] < 2 or
-                      self.android_devices[1].aware_capabilities[
-                        aconsts.CAP_MAX_NDI_INTERFACES] < 2,
-                      "DUTs do not support enough NDIs")
+        if not same_request:
+            asserts.skip_if(
+                self.android_devices[0]
+                .aware_capabilities[aconsts.CAP_MAX_NDI_INTERFACES] < 2
+                or self.android_devices[1]
+                .aware_capabilities[aconsts.CAP_MAX_NDI_INTERFACES] < 2,
+                "DUTs do not support enough NDIs")
 
-    (p_dut, s_dut, p_id, s_id, p_disc_id, s_disc_id, peer_id_on_sub,
-     peer_id_on_pub_null) = self.set_up_discovery(
-        aconsts.PUBLISH_TYPE_UNSOLICITED, aconsts.SUBSCRIBE_TYPE_PASSIVE, False)
+        (p_dut, s_dut, p_id, s_id, p_disc_id, s_disc_id, peer_id_on_sub,
+         peer_id_on_pub_null) = self.set_up_discovery(
+             aconsts.PUBLISH_TYPE_UNSOLICITED, aconsts.SUBSCRIBE_TYPE_PASSIVE,
+             False)
 
-    p_id2, p_mac = autils.attach_with_identity(p_dut)
-    s_id2, s_mac = autils.attach_with_identity(s_dut)
+        p_id2, p_mac = autils.attach_with_identity(p_dut)
+        s_id2, s_mac = autils.attach_with_identity(s_dut)
 
-    if inits_on_same_dut:
-      resp_dut = p_dut
-      resp_id = p_id2
-      resp_mac = p_mac
+        if inits_on_same_dut:
+            resp_dut = p_dut
+            resp_id = p_id2
+            resp_mac = p_mac
 
-      init_dut = s_dut
-      init_id = s_id2
-      init_mac = s_mac
-    else:
-      resp_dut = s_dut
-      resp_id = s_id2
-      resp_mac = s_mac
+            init_dut = s_dut
+            init_id = s_id2
+            init_mac = s_mac
+        else:
+            resp_dut = s_dut
+            resp_id = s_id2
+            resp_mac = s_mac
 
-      init_dut = p_dut
-      init_id = p_id2
-      init_mac = p_mac
+            init_dut = p_dut
+            init_id = p_id2
+            init_mac = p_mac
 
-    passphrase = None if same_request else self.PASSPHRASE
+        passphrase = None if same_request else self.PASSPHRASE
 
-    if ib_first:
-      # request in-band network (to completion)
-      p_req_key = self.request_network(
-          p_dut,
-          p_dut.droid.wifiAwareCreateNetworkSpecifier(p_disc_id, None))
-      s_req_key = self.request_network(
-          s_dut,
-          s_dut.droid.wifiAwareCreateNetworkSpecifier(s_disc_id,
-                                                      peer_id_on_sub))
+        if ib_first:
+            # request in-band network (to completion)
+            p_req_key = self.request_network(
+                p_dut,
+                p_dut.droid.wifiAwareCreateNetworkSpecifier(p_disc_id, None))
+            s_req_key = self.request_network(
+                s_dut,
+                s_dut.droid.wifiAwareCreateNetworkSpecifier(
+                    s_disc_id, peer_id_on_sub))
 
-      # Publisher & Subscriber: wait for network formation
-      p_net_event = autils.wait_for_event_with_keys(
-          p_dut, cconsts.EVENT_NETWORK_CALLBACK,
-          autils.EVENT_NDP_TIMEOUT,
-          (cconsts.NETWORK_CB_KEY_EVENT,
-           cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-          (cconsts.NETWORK_CB_KEY_ID, p_req_key))
-      s_net_event = autils.wait_for_event_with_keys(
-          s_dut, cconsts.EVENT_NETWORK_CALLBACK,
-          autils.EVENT_NDP_TIMEOUT,
-          (cconsts.NETWORK_CB_KEY_EVENT,
-           cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-          (cconsts.NETWORK_CB_KEY_ID, s_req_key))
+            # Publisher & Subscriber: wait for network formation
+            p_net_event = autils.wait_for_event_with_keys(
+                p_dut, cconsts.EVENT_NETWORK_CALLBACK,
+                autils.EVENT_NDP_TIMEOUT,
+                (cconsts.NETWORK_CB_KEY_EVENT,
+                 cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+                (cconsts.NETWORK_CB_KEY_ID, p_req_key))
+            s_net_event = autils.wait_for_event_with_keys(
+                s_dut, cconsts.EVENT_NETWORK_CALLBACK,
+                autils.EVENT_NDP_TIMEOUT,
+                (cconsts.NETWORK_CB_KEY_EVENT,
+                 cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+                (cconsts.NETWORK_CB_KEY_ID, s_req_key))
 
-    # request out-of-band network
-    resp_req_key = autils.request_network(resp_dut,
-          resp_dut.droid.wifiAwareCreateNetworkSpecifierOob(
-              resp_id, aconsts.DATA_PATH_RESPONDER, init_mac, passphrase))
-    init_req_key = autils.request_network(init_dut,
-          init_dut.droid.wifiAwareCreateNetworkSpecifierOob(
-              init_id, aconsts.DATA_PATH_INITIATOR, resp_mac, passphrase))
+        # request out-of-band network
+        resp_req_key = autils.request_network(
+            resp_dut,
+            resp_dut.droid.wifiAwareCreateNetworkSpecifierOob(
+                resp_id, aconsts.DATA_PATH_RESPONDER, init_mac, passphrase))
+        init_req_key = autils.request_network(
+            init_dut,
+            init_dut.droid.wifiAwareCreateNetworkSpecifierOob(
+                init_id, aconsts.DATA_PATH_INITIATOR, resp_mac, passphrase))
 
-    resp_net_event = autils.wait_for_event_with_keys(
-        resp_dut, cconsts.EVENT_NETWORK_CALLBACK,
-        autils.EVENT_NDP_TIMEOUT,
-        (cconsts.NETWORK_CB_KEY_EVENT,
-         cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-        (cconsts.NETWORK_CB_KEY_ID, resp_req_key))
-    init_net_event = autils.wait_for_event_with_keys(
-        init_dut, cconsts.EVENT_NETWORK_CALLBACK,
-        autils.EVENT_NDP_TIMEOUT,
-        (cconsts.NETWORK_CB_KEY_EVENT,
-         cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-        (cconsts.NETWORK_CB_KEY_ID, init_req_key))
+        resp_net_event = autils.wait_for_event_with_keys(
+            resp_dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
+            (cconsts.NETWORK_CB_KEY_EVENT,
+             cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+            (cconsts.NETWORK_CB_KEY_ID, resp_req_key))
+        init_net_event = autils.wait_for_event_with_keys(
+            init_dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
+            (cconsts.NETWORK_CB_KEY_EVENT,
+             cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+            (cconsts.NETWORK_CB_KEY_ID, init_req_key))
 
-    if not ib_first:
-      # request in-band network (to completion)
-      p_req_key = self.request_network(
-          p_dut,
-          p_dut.droid.wifiAwareCreateNetworkSpecifier(p_disc_id, None))
-      s_req_key = self.request_network(
-          s_dut,
-          s_dut.droid.wifiAwareCreateNetworkSpecifier(s_disc_id,
-                                                      peer_id_on_sub))
+        if not ib_first:
+            # request in-band network (to completion)
+            p_req_key = self.request_network(
+                p_dut,
+                p_dut.droid.wifiAwareCreateNetworkSpecifier(p_disc_id, None))
+            s_req_key = self.request_network(
+                s_dut,
+                s_dut.droid.wifiAwareCreateNetworkSpecifier(
+                    s_disc_id, peer_id_on_sub))
 
-      # Publisher & Subscriber: wait for network formation
-      p_net_event = autils.wait_for_event_with_keys(
-          p_dut, cconsts.EVENT_NETWORK_CALLBACK,
-          autils.EVENT_NDP_TIMEOUT,
-          (cconsts.NETWORK_CB_KEY_EVENT,
-           cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-          (cconsts.NETWORK_CB_KEY_ID, p_req_key))
-      s_net_event = autils.wait_for_event_with_keys(
-          s_dut, cconsts.EVENT_NETWORK_CALLBACK,
-          autils.EVENT_NDP_TIMEOUT,
-          (cconsts.NETWORK_CB_KEY_EVENT,
-           cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-          (cconsts.NETWORK_CB_KEY_ID, s_req_key))
+            # Publisher & Subscriber: wait for network formation
+            p_net_event = autils.wait_for_event_with_keys(
+                p_dut, cconsts.EVENT_NETWORK_CALLBACK,
+                autils.EVENT_NDP_TIMEOUT,
+                (cconsts.NETWORK_CB_KEY_EVENT,
+                 cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+                (cconsts.NETWORK_CB_KEY_ID, p_req_key))
+            s_net_event = autils.wait_for_event_with_keys(
+                s_dut, cconsts.EVENT_NETWORK_CALLBACK,
+                autils.EVENT_NDP_TIMEOUT,
+                (cconsts.NETWORK_CB_KEY_EVENT,
+                 cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+                (cconsts.NETWORK_CB_KEY_ID, s_req_key))
 
-    # extract net info
-    pub_interface = p_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
-    sub_interface = s_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
-    resp_interface = resp_net_event["data"][
-      cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
-    init_interface = init_net_event["data"][
-      cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+        # extract net info
+        pub_interface = p_net_event["data"][
+            cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+        sub_interface = s_net_event["data"][
+            cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+        resp_interface = resp_net_event["data"][
+            cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+        init_interface = init_net_event["data"][
+            cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
 
-    self.log.info(
-        "Interface names: Pub=%s, Sub=%s, Resp=%s, Init=%s", pub_interface,
-        sub_interface, resp_interface, init_interface)
+        self.log.info("Interface names: Pub=%s, Sub=%s, Resp=%s, Init=%s",
+                      pub_interface, sub_interface, resp_interface,
+                      init_interface)
 
-    pub_ipv6 = \
-    p_dut.droid.connectivityGetLinkLocalIpv6Address(pub_interface).split("%")[0]
-    sub_ipv6 = \
-    s_dut.droid.connectivityGetLinkLocalIpv6Address(sub_interface).split("%")[0]
-    resp_ipv6 = \
-    resp_dut.droid.connectivityGetLinkLocalIpv6Address(resp_interface).split(
-      "%")[0]
-    init_ipv6 = \
-    init_dut.droid.connectivityGetLinkLocalIpv6Address(init_interface).split(
-      "%")[0]
+        pub_ipv6 = \
+        p_dut.droid.connectivityGetLinkLocalIpv6Address(pub_interface).split("%")[0]
+        sub_ipv6 = \
+        s_dut.droid.connectivityGetLinkLocalIpv6Address(sub_interface).split("%")[0]
+        resp_ipv6 = \
+        resp_dut.droid.connectivityGetLinkLocalIpv6Address(resp_interface).split(
+          "%")[0]
+        init_ipv6 = \
+        init_dut.droid.connectivityGetLinkLocalIpv6Address(init_interface).split(
+          "%")[0]
 
-    self.log.info(
-      "Interface addresses (IPv6): Pub=%s, Sub=%s, Resp=%s, Init=%s", pub_ipv6,
-      sub_ipv6, resp_ipv6, init_ipv6)
+        self.log.info(
+            "Interface addresses (IPv6): Pub=%s, Sub=%s, Resp=%s, Init=%s",
+            pub_ipv6, sub_ipv6, resp_ipv6, init_ipv6)
 
-    # validate NDP/NDI conditions (using interface names & ipv6)
-    if same_request:
-      asserts.assert_equal(pub_interface,
-         resp_interface if inits_on_same_dut else init_interface,
-         "NDP interfaces don't match on Pub/other")
-      asserts.assert_equal(sub_interface,
-         init_interface if inits_on_same_dut else resp_interface,
-         "NDP interfaces don't match on Sub/other")
+        # validate NDP/NDI conditions (using interface names & ipv6)
+        if same_request:
+            asserts.assert_equal(
+                pub_interface, resp_interface if inits_on_same_dut else
+                init_interface, "NDP interfaces don't match on Pub/other")
+            asserts.assert_equal(
+                sub_interface, init_interface if inits_on_same_dut else
+                resp_interface, "NDP interfaces don't match on Sub/other")
 
-      asserts.assert_equal(pub_ipv6,
-                           resp_ipv6 if inits_on_same_dut else init_ipv6,
-                           "NDP IPv6 don't match on Pub/other")
-      asserts.assert_equal(sub_ipv6,
-                           init_ipv6 if inits_on_same_dut else resp_ipv6,
-                           "NDP IPv6 don't match on Sub/other")
-    else:
-      asserts.assert_false(pub_interface == (
-        resp_interface if inits_on_same_dut else init_interface),
-                           "NDP interfaces match on Pub/other")
-      asserts.assert_false(sub_interface == (
-        init_interface if inits_on_same_dut else resp_interface),
-                           "NDP interfaces match on Sub/other")
+            asserts.assert_equal(pub_ipv6, resp_ipv6
+                                 if inits_on_same_dut else init_ipv6,
+                                 "NDP IPv6 don't match on Pub/other")
+            asserts.assert_equal(sub_ipv6, init_ipv6
+                                 if inits_on_same_dut else resp_ipv6,
+                                 "NDP IPv6 don't match on Sub/other")
+        else:
+            asserts.assert_false(
+                pub_interface == (resp_interface
+                                  if inits_on_same_dut else init_interface),
+                "NDP interfaces match on Pub/other")
+            asserts.assert_false(
+                sub_interface == (init_interface
+                                  if inits_on_same_dut else resp_interface),
+                "NDP interfaces match on Sub/other")
 
-      asserts.assert_false(pub_ipv6 ==
-                           (resp_ipv6 if inits_on_same_dut else init_ipv6),
-                           "NDP IPv6 match on Pub/other")
-      asserts.assert_false(sub_ipv6 ==
-                           (init_ipv6 if inits_on_same_dut else resp_ipv6),
-                           "NDP IPv6 match on Sub/other")
+            asserts.assert_false(
+                pub_ipv6 == (resp_ipv6 if inits_on_same_dut else init_ipv6),
+                "NDP IPv6 match on Pub/other")
+            asserts.assert_false(
+                sub_ipv6 == (init_ipv6 if inits_on_same_dut else resp_ipv6),
+                "NDP IPv6 match on Sub/other")
 
-    # release requests
-    p_dut.droid.connectivityUnregisterNetworkCallback(p_req_key)
-    s_dut.droid.connectivityUnregisterNetworkCallback(s_req_key)
-    resp_dut.droid.connectivityUnregisterNetworkCallback(resp_req_key)
-    init_dut.droid.connectivityUnregisterNetworkCallback(init_req_key)
+        # release requests
+        p_dut.droid.connectivityUnregisterNetworkCallback(p_req_key)
+        s_dut.droid.connectivityUnregisterNetworkCallback(s_req_key)
+        resp_dut.droid.connectivityUnregisterNetworkCallback(resp_req_key)
+        init_dut.droid.connectivityUnregisterNetworkCallback(init_req_key)
 
-  @test_tracker_info(uuid="d8a0839d-4ba0-43f2-af93-3cf1382f9f16")
-  def test_identical_ndps_mix_ib_oob_ib_first_same_polarity(self):
-    """Validate that a single NDP is created for multiple identical requests
+    @test_tracker_info(uuid="d8a0839d-4ba0-43f2-af93-3cf1382f9f16")
+    def test_identical_ndps_mix_ib_oob_ib_first_same_polarity(self):
+        """Validate that a single NDP is created for multiple identical requests
     which are issued through either in-band (ib) or out-of-band (oob) APIs.
 
     The in-band request is issued first. Both Initiators (Sub == Initiator) are
     run on the same device.
     """
-    self.run_mix_ib_oob(same_request=True,
-                        ib_first=True,
-                        inits_on_same_dut=True)
+        self.run_mix_ib_oob(
+            same_request=True, ib_first=True, inits_on_same_dut=True)
 
-  @test_tracker_info(uuid="70bbb811-0bed-4a19-96b3-f2446e777c8a")
-  def test_identical_ndps_mix_ib_oob_oob_first_same_polarity(self):
-    """Validate that a single NDP is created for multiple identical requests
+    @test_tracker_info(uuid="70bbb811-0bed-4a19-96b3-f2446e777c8a")
+    def test_identical_ndps_mix_ib_oob_oob_first_same_polarity(self):
+        """Validate that a single NDP is created for multiple identical requests
     which are issued through either in-band (ib) or out-of-band (oob) APIs.
 
     The out-of-band request is issued first. Both Initiators (Sub == Initiator)
     are run on the same device.
     """
-    self.run_mix_ib_oob(same_request=True,
-                        ib_first=False,
-                        inits_on_same_dut=True)
+        self.run_mix_ib_oob(
+            same_request=True, ib_first=False, inits_on_same_dut=True)
 
-  @test_tracker_info(uuid="d9796da5-f96a-4a51-be0f-89d6f5bfe3ad")
-  def test_identical_ndps_mix_ib_oob_ib_first_diff_polarity(self):
-    """Validate that a single NDP is created for multiple identical requests
+    @test_tracker_info(uuid="d9796da5-f96a-4a51-be0f-89d6f5bfe3ad")
+    def test_identical_ndps_mix_ib_oob_ib_first_diff_polarity(self):
+        """Validate that a single NDP is created for multiple identical requests
     which are issued through either in-band (ib) or out-of-band (oob) APIs.
 
     The in-band request is issued first. Initiators (Sub == Initiator) are
     run on different devices.
     """
-    self.run_mix_ib_oob(same_request=True,
-                        ib_first=True,
-                        inits_on_same_dut=False)
+        self.run_mix_ib_oob(
+            same_request=True, ib_first=True, inits_on_same_dut=False)
 
-  @test_tracker_info(uuid="72b16cbf-53ad-4f98-8dcf-a8cc5fa812e3")
-  def test_identical_ndps_mix_ib_oob_oob_first_diff_polarity(self):
-    """Validate that a single NDP is created for multiple identical requests
+    @test_tracker_info(uuid="72b16cbf-53ad-4f98-8dcf-a8cc5fa812e3")
+    def test_identical_ndps_mix_ib_oob_oob_first_diff_polarity(self):
+        """Validate that a single NDP is created for multiple identical requests
     which are issued through either in-band (ib) or out-of-band (oob) APIs.
 
     The out-of-band request is issued first. Initiators (Sub == Initiator) are
     run on different devices.
     """
-    self.run_mix_ib_oob(same_request=True,
-                        ib_first=False,
-                        inits_on_same_dut=False)
+        self.run_mix_ib_oob(
+            same_request=True, ib_first=False, inits_on_same_dut=False)
 
-  @test_tracker_info(uuid="51f9581e-c5ee-48a7-84d2-adff4876c3d7")
-  def test_multiple_ndis_mix_ib_oob_ib_first_same_polarity(self):
-    """Validate that multiple NDIs are created for NDPs which are requested with
+    @test_tracker_info(uuid="51f9581e-c5ee-48a7-84d2-adff4876c3d7")
+    def test_multiple_ndis_mix_ib_oob_ib_first_same_polarity(self):
+        """Validate that multiple NDIs are created for NDPs which are requested with
     different security configurations. Use a mix of in-band and out-of-band APIs
     to request the different NDPs.
 
     The in-band request is issued first. Initiators (Sub == Initiator) are
     run on the same device.
     """
-    self.run_mix_ib_oob(same_request=False,
-                        ib_first=True,
-                        inits_on_same_dut=True)
+        self.run_mix_ib_oob(
+            same_request=False, ib_first=True, inits_on_same_dut=True)
 
-  @test_tracker_info(uuid="b1e3070e-4d38-4b31-862d-39b82e0f2853")
-  def test_multiple_ndis_mix_ib_oob_oob_first_same_polarity(self):
-    """Validate that multiple NDIs are created for NDPs which are requested with
+    @test_tracker_info(uuid="b1e3070e-4d38-4b31-862d-39b82e0f2853")
+    def test_multiple_ndis_mix_ib_oob_oob_first_same_polarity(self):
+        """Validate that multiple NDIs are created for NDPs which are requested with
     different security configurations. Use a mix of in-band and out-of-band APIs
     to request the different NDPs.
 
     The out-of-band request is issued first. Initiators (Sub == Initiator) are
     run on the same device.
     """
-    self.run_mix_ib_oob(same_request=False,
-                        ib_first=False,
-                        inits_on_same_dut=True)
+        self.run_mix_ib_oob(
+            same_request=False, ib_first=False, inits_on_same_dut=True)
 
-  @test_tracker_info(uuid="b1e3070e-4d38-4b31-862d-39b82e0f2853")
-  def test_multiple_ndis_mix_ib_oob_ib_first_diff_polarity(self):
-    """Validate that multiple NDIs are created for NDPs which are requested with
+    @test_tracker_info(uuid="b1e3070e-4d38-4b31-862d-39b82e0f2853")
+    def test_multiple_ndis_mix_ib_oob_ib_first_diff_polarity(self):
+        """Validate that multiple NDIs are created for NDPs which are requested with
     different security configurations. Use a mix of in-band and out-of-band APIs
     to request the different NDPs.
 
     The in-band request is issued first. Initiators (Sub == Initiator) are
     run on different devices.
     """
-    self.run_mix_ib_oob(same_request=False,
-                        ib_first=True,
-                        inits_on_same_dut=False)
+        self.run_mix_ib_oob(
+            same_request=False, ib_first=True, inits_on_same_dut=False)
 
-  @test_tracker_info(uuid="596caadf-028e-494b-bbce-8304ccec2cbb")
-  def test_multiple_ndis_mix_ib_oob_oob_first_diff_polarity(self):
-    """Validate that multiple NDIs are created for NDPs which are requested with
+    @test_tracker_info(uuid="596caadf-028e-494b-bbce-8304ccec2cbb")
+    def test_multiple_ndis_mix_ib_oob_oob_first_diff_polarity(self):
+        """Validate that multiple NDIs are created for NDPs which are requested with
     different security configurations. Use a mix of in-band and out-of-band APIs
     to request the different NDPs.
 
     The out-of-band request is issued first. Initiators (Sub == Initiator) are
     run on different devices.
     """
-    self.run_mix_ib_oob(same_request=False,
-                        ib_first=False,
-                        inits_on_same_dut=False)
+        self.run_mix_ib_oob(
+            same_request=False, ib_first=False, inits_on_same_dut=False)
 
-  ########################################################################
+    ########################################################################
 
-  def test_ndp_loop(self):
-    """Validate that can create a loop (chain) of N NDPs between N devices,
+    def test_ndp_loop(self):
+        """Validate that can create a loop (chain) of N NDPs between N devices,
     where N >= 3, e.g.
 
     A - B
@@ -1868,58 +1908,63 @@
 
     The NDPs are all OPEN (no encryption).
     """
-    asserts.assert_true(len(self.android_devices) >= 3,
-                        'A minimum of 3 devices is needed to run the test, have %d' %
-                        len(self.android_devices))
+        asserts.assert_true(
+            len(self.android_devices) >= 3,
+            'A minimum of 3 devices is needed to run the test, have %d' % len(
+                self.android_devices))
 
-    duts = self.android_devices
-    loop_len = len(duts)
-    ids = []
-    macs = []
-    reqs = [[], [], []]
-    ifs = [[], [], []]
-    ipv6s = [[], [], []]
+        duts = self.android_devices
+        loop_len = len(duts)
+        ids = []
+        macs = []
+        reqs = [[], [], []]
+        ifs = [[], [], []]
+        ipv6s = [[], [], []]
 
-    for i in range(loop_len):
-      duts[i].pretty_name = chr(ord("A") + i)
+        for i in range(loop_len):
+            duts[i].pretty_name = chr(ord("A") + i)
 
-    # start-up 3 devices (attach w/ identity)
-    for i in range(loop_len):
-      ids.append(duts[i].droid.wifiAwareAttach(True))
-      autils.wait_for_event(duts[i], aconsts.EVENT_CB_ON_ATTACHED)
-      ident_event = autils.wait_for_event(duts[i],
-                                          aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
-      macs.append(ident_event['data']['mac'])
+        # start-up 3 devices (attach w/ identity)
+        for i in range(loop_len):
+            ids.append(duts[i].droid.wifiAwareAttach(True))
+            autils.wait_for_event(duts[i], aconsts.EVENT_CB_ON_ATTACHED)
+            ident_event = autils.wait_for_event(
+                duts[i], aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
+            macs.append(ident_event['data']['mac'])
 
-    # wait for for devices to synchronize with each other - there are no other
-    # mechanisms to make sure this happens for OOB discovery (except retrying
-    # to execute the data-path request)
-    time.sleep(autils.WAIT_FOR_CLUSTER)
+        # wait for for devices to synchronize with each other - there are no other
+        # mechanisms to make sure this happens for OOB discovery (except retrying
+        # to execute the data-path request)
+        time.sleep(autils.WAIT_FOR_CLUSTER)
 
-    # create the N NDPs: i to (i+1) % N
-    for i in range(loop_len):
-      peer_device = (i + 1) % loop_len
+        # create the N NDPs: i to (i+1) % N
+        for i in range(loop_len):
+            peer_device = (i + 1) % loop_len
 
-      (init_req_key, resp_req_key, init_aware_if,
-       resp_aware_if, init_ipv6, resp_ipv6) = autils.create_oob_ndp_on_sessions(
-          duts[i], duts[peer_device],
-          ids[i], macs[i], ids[peer_device], macs[peer_device])
+            (init_req_key, resp_req_key, init_aware_if, resp_aware_if,
+             init_ipv6, resp_ipv6) = autils.create_oob_ndp_on_sessions(
+                 duts[i], duts[peer_device], ids[i], macs[i], ids[peer_device],
+                 macs[peer_device])
 
-      reqs[i].append(init_req_key)
-      reqs[peer_device].append(resp_req_key)
-      ifs[i].append(init_aware_if)
-      ifs[peer_device].append(resp_aware_if)
-      ipv6s[i].append(init_ipv6)
-      ipv6s[peer_device].append(resp_ipv6)
+            reqs[i].append(init_req_key)
+            reqs[peer_device].append(resp_req_key)
+            ifs[i].append(init_aware_if)
+            ifs[peer_device].append(resp_aware_if)
+            ipv6s[i].append(init_ipv6)
+            ipv6s[peer_device].append(resp_ipv6)
 
-    # clean-up
-    for i in range(loop_len):
-      for req in reqs[i]:
-        duts[i].droid.connectivityUnregisterNetworkCallback(req)
+        # clean-up
+        for i in range(loop_len):
+            for req in reqs[i]:
+                duts[i].droid.connectivityUnregisterNetworkCallback(req)
 
-    # info
-    self.log.info("MACs: %s", macs)
-    self.log.info("Interface names: %s", ifs)
-    self.log.info("IPv6 addresses: %s", ipv6s)
-    asserts.explicit_pass("NDP loop test",
-                          extras={"macs": macs, "ifs": ifs, "ipv6s": ipv6s})
+        # info
+        self.log.info("MACs: %s", macs)
+        self.log.info("Interface names: %s", ifs)
+        self.log.info("IPv6 addresses: %s", ipv6s)
+        asserts.explicit_pass(
+            "NDP loop test", extras={
+                "macs": macs,
+                "ifs": ifs,
+                "ipv6s": ipv6s
+            })
diff --git a/acts/tests/google/wifi/aware/functional/DiscoveryTest.py b/acts/tests/google/wifi/aware/functional/DiscoveryTest.py
index c6f75b0..0de6d12 100644
--- a/acts/tests/google/wifi/aware/functional/DiscoveryTest.py
+++ b/acts/tests/google/wifi/aware/functional/DiscoveryTest.py
@@ -25,27 +25,27 @@
 
 
 class DiscoveryTest(AwareBaseTest):
-  """Set of tests for Wi-Fi Aware discovery."""
+    """Set of tests for Wi-Fi Aware discovery."""
 
-  # configuration parameters used by tests
-  PAYLOAD_SIZE_MIN = 0
-  PAYLOAD_SIZE_TYPICAL = 1
-  PAYLOAD_SIZE_MAX = 2
+    # configuration parameters used by tests
+    PAYLOAD_SIZE_MIN = 0
+    PAYLOAD_SIZE_TYPICAL = 1
+    PAYLOAD_SIZE_MAX = 2
 
-  # message strings
-  query_msg = "How are you doing? 你好嗎?"
-  response_msg = "Doing ok - thanks! 做的不錯 - 謝謝!"
+    # message strings
+    query_msg = "How are you doing? 你好嗎?"
+    response_msg = "Doing ok - thanks! 做的不錯 - 謝謝!"
 
-  # message re-transmit counter (increases reliability in open-environment)
-  # Note: reliability of message transmission is tested elsewhere
-  msg_retx_count = 5  # hard-coded max value, internal API
+    # message re-transmit counter (increases reliability in open-environment)
+    # Note: reliability of message transmission is tested elsewhere
+    msg_retx_count = 5  # hard-coded max value, internal API
 
-  def __init__(self, controllers):
-    AwareBaseTest.__init__(self, controllers)
+    def __init__(self, controllers):
+        AwareBaseTest.__init__(self, controllers)
 
-  def create_base_config(self, caps, is_publish, ptype, stype, payload_size,
-                         ttl, term_ind_on, null_match):
-    """Create a base configuration based on input parameters.
+    def create_base_config(self, caps, is_publish, ptype, stype, payload_size,
+                           ttl, term_ind_on, null_match):
+        """Create a base configuration based on input parameters.
 
     Args:
       caps: device capability dictionary
@@ -59,44 +59,48 @@
     Returns:
       publish discovery configuration object.
     """
-    config = {}
-    if is_publish:
-      config[aconsts.DISCOVERY_KEY_DISCOVERY_TYPE] = ptype
-    else:
-      config[aconsts.DISCOVERY_KEY_DISCOVERY_TYPE] = stype
-    config[aconsts.DISCOVERY_KEY_TTL] = ttl
-    config[aconsts.DISCOVERY_KEY_TERM_CB_ENABLED] = term_ind_on
-    if payload_size == self.PAYLOAD_SIZE_MIN:
-      config[aconsts.DISCOVERY_KEY_SERVICE_NAME] = "a"
-      config[aconsts.DISCOVERY_KEY_SSI] = None
-      config[aconsts.DISCOVERY_KEY_MATCH_FILTER_LIST] = []
-    elif payload_size == self.PAYLOAD_SIZE_TYPICAL:
-      config[aconsts.DISCOVERY_KEY_SERVICE_NAME] = "GoogleTestServiceX"
-      if is_publish:
-        config[aconsts.DISCOVERY_KEY_SSI] = string.ascii_letters
-      else:
-        config[aconsts.DISCOVERY_KEY_SSI] = string.ascii_letters[::
+        config = {}
+        if is_publish:
+            config[aconsts.DISCOVERY_KEY_DISCOVERY_TYPE] = ptype
+        else:
+            config[aconsts.DISCOVERY_KEY_DISCOVERY_TYPE] = stype
+        config[aconsts.DISCOVERY_KEY_TTL] = ttl
+        config[aconsts.DISCOVERY_KEY_TERM_CB_ENABLED] = term_ind_on
+        if payload_size == self.PAYLOAD_SIZE_MIN:
+            config[aconsts.DISCOVERY_KEY_SERVICE_NAME] = "a"
+            config[aconsts.DISCOVERY_KEY_SSI] = None
+            config[aconsts.DISCOVERY_KEY_MATCH_FILTER_LIST] = []
+        elif payload_size == self.PAYLOAD_SIZE_TYPICAL:
+            config[aconsts.DISCOVERY_KEY_SERVICE_NAME] = "GoogleTestServiceX"
+            if is_publish:
+                config[aconsts.DISCOVERY_KEY_SSI] = string.ascii_letters
+            else:
+                config[aconsts.
+                       DISCOVERY_KEY_SSI] = string.ascii_letters[::
                                                                  -1]  # reverse
-      config[aconsts.DISCOVERY_KEY_MATCH_FILTER_LIST] = autils.encode_list(
-          [(10).to_bytes(1, byteorder="big"), "hello there string"
-          if not null_match else None,
-           bytes(range(40))])
-    else: # PAYLOAD_SIZE_MAX
-      config[aconsts.DISCOVERY_KEY_SERVICE_NAME] = "VeryLong" + "X" * (
-        caps[aconsts.CAP_MAX_SERVICE_NAME_LEN] - 8)
-      config[aconsts.DISCOVERY_KEY_SSI] = ("P" if is_publish else "S") * caps[
-        aconsts.CAP_MAX_SERVICE_SPECIFIC_INFO_LEN]
-      mf = autils.construct_max_match_filter(
-          caps[aconsts.CAP_MAX_MATCH_FILTER_LEN])
-      if null_match:
-        mf[2] = None
-      config[aconsts.DISCOVERY_KEY_MATCH_FILTER_LIST] = autils.encode_list(mf)
+            config[
+                aconsts.DISCOVERY_KEY_MATCH_FILTER_LIST] = autils.encode_list(
+                    [(10).to_bytes(1, byteorder="big"), "hello there string"
+                     if not null_match else None,
+                     bytes(range(40))])
+        else:  # PAYLOAD_SIZE_MAX
+            config[aconsts.DISCOVERY_KEY_SERVICE_NAME] = "VeryLong" + "X" * (
+                caps[aconsts.CAP_MAX_SERVICE_NAME_LEN] - 8)
+            config[aconsts.DISCOVERY_KEY_SSI] = (
+                "P" if is_publish else
+                "S") * caps[aconsts.CAP_MAX_SERVICE_SPECIFIC_INFO_LEN]
+            mf = autils.construct_max_match_filter(
+                caps[aconsts.CAP_MAX_MATCH_FILTER_LEN])
+            if null_match:
+                mf[2] = None
+            config[aconsts.
+                   DISCOVERY_KEY_MATCH_FILTER_LIST] = autils.encode_list(mf)
 
-    return config
+        return config
 
-  def create_publish_config(self, caps, ptype, payload_size, ttl, term_ind_on,
-                            null_match):
-    """Create a publish configuration based on input parameters.
+    def create_publish_config(self, caps, ptype, payload_size, ttl,
+                              term_ind_on, null_match):
+        """Create a publish configuration based on input parameters.
 
     Args:
       caps: device capability dictionary
@@ -108,12 +112,12 @@
     Returns:
       publish discovery configuration object.
     """
-    return self.create_base_config(caps, True, ptype, None, payload_size, ttl,
-                                   term_ind_on, null_match)
+        return self.create_base_config(caps, True, ptype, None, payload_size,
+                                       ttl, term_ind_on, null_match)
 
-  def create_subscribe_config(self, caps, stype, payload_size, ttl, term_ind_on,
-                              null_match):
-    """Create a subscribe configuration based on input parameters.
+    def create_subscribe_config(self, caps, stype, payload_size, ttl,
+                                term_ind_on, null_match):
+        """Create a subscribe configuration based on input parameters.
 
     Args:
       caps: device capability dictionary
@@ -125,11 +129,11 @@
     Returns:
       subscribe discovery configuration object.
     """
-    return self.create_base_config(caps, False, None, stype, payload_size, ttl,
-                                   term_ind_on, null_match)
+        return self.create_base_config(caps, False, None, stype, payload_size,
+                                       ttl, term_ind_on, null_match)
 
-  def positive_discovery_test_utility(self, ptype, stype, payload_size):
-    """Utility which runs a positive discovery test:
+    def positive_discovery_test_utility(self, ptype, stype, payload_size):
+        """Utility which runs a positive discovery test:
     - Discovery (publish/subscribe) with TTL=0 (non-self-terminating)
     - Exchange messages
     - Update publish/subscribe
@@ -140,150 +144,159 @@
       stype: Subscribe discovery type
       payload_size: One of PAYLOAD_SIZE_* constants - MIN, TYPICAL, MAX
     """
-    p_dut = self.android_devices[0]
-    p_dut.pretty_name = "Publisher"
-    s_dut = self.android_devices[1]
-    s_dut.pretty_name = "Subscriber"
+        p_dut = self.android_devices[0]
+        p_dut.pretty_name = "Publisher"
+        s_dut = self.android_devices[1]
+        s_dut.pretty_name = "Subscriber"
 
-    # Publisher+Subscriber: attach and wait for confirmation
-    p_id = p_dut.droid.wifiAwareAttach(False)
-    autils.wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
-    time.sleep(self.device_startup_offset)
-    s_id = s_dut.droid.wifiAwareAttach(False)
-    autils.wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
+        # Publisher+Subscriber: attach and wait for confirmation
+        p_id = p_dut.droid.wifiAwareAttach(False)
+        autils.wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
+        time.sleep(self.device_startup_offset)
+        s_id = s_dut.droid.wifiAwareAttach(False)
+        autils.wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
 
-    # Publisher: start publish and wait for confirmation
-    p_config = self.create_publish_config(
-        p_dut.aware_capabilities,
-        ptype,
-        payload_size,
-        ttl=0,
-        term_ind_on=False,
-        null_match=False)
-    p_disc_id = p_dut.droid.wifiAwarePublish(p_id, p_config)
-    autils.wait_for_event(p_dut, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
+        # Publisher: start publish and wait for confirmation
+        p_config = self.create_publish_config(
+            p_dut.aware_capabilities,
+            ptype,
+            payload_size,
+            ttl=0,
+            term_ind_on=False,
+            null_match=False)
+        p_disc_id = p_dut.droid.wifiAwarePublish(p_id, p_config)
+        autils.wait_for_event(p_dut, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
 
-    # Subscriber: start subscribe and wait for confirmation
-    s_config = self.create_subscribe_config(
-        s_dut.aware_capabilities,
-        stype,
-        payload_size,
-        ttl=0,
-        term_ind_on=False,
-        null_match=True)
-    s_disc_id = s_dut.droid.wifiAwareSubscribe(s_id, s_config)
-    autils.wait_for_event(s_dut, aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED)
+        # Subscriber: start subscribe and wait for confirmation
+        s_config = self.create_subscribe_config(
+            s_dut.aware_capabilities,
+            stype,
+            payload_size,
+            ttl=0,
+            term_ind_on=False,
+            null_match=True)
+        s_disc_id = s_dut.droid.wifiAwareSubscribe(s_id, s_config)
+        autils.wait_for_event(s_dut, aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED)
 
-    # Subscriber: wait for service discovery
-    discovery_event = autils.wait_for_event(
-        s_dut, aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
-    peer_id_on_sub = discovery_event["data"][aconsts.SESSION_CB_KEY_PEER_ID]
+        # Subscriber: wait for service discovery
+        discovery_event = autils.wait_for_event(
+            s_dut, aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
+        peer_id_on_sub = discovery_event["data"][
+            aconsts.SESSION_CB_KEY_PEER_ID]
 
-    # Subscriber: validate contents of discovery:
-    # - SSI: publisher's
-    # - Match filter: UNSOLICITED - publisher, SOLICITED - subscriber
-    autils.assert_equal_strings(
-        bytes(discovery_event["data"][
-            aconsts.SESSION_CB_KEY_SERVICE_SPECIFIC_INFO]).decode("utf-8"),
-        p_config[aconsts.DISCOVERY_KEY_SSI],
-        "Discovery mismatch: service specific info (SSI)")
-    asserts.assert_equal(
-        autils.decode_list(
-            discovery_event["data"][aconsts.SESSION_CB_KEY_MATCH_FILTER_LIST]),
-        autils.decode_list(p_config[aconsts.DISCOVERY_KEY_MATCH_FILTER_LIST]
-                           if ptype == aconsts.PUBLISH_TYPE_UNSOLICITED else
-                           s_config[aconsts.DISCOVERY_KEY_MATCH_FILTER_LIST]),
-        "Discovery mismatch: match filter")
+        # Subscriber: validate contents of discovery:
+        # - SSI: publisher's
+        # - Match filter: UNSOLICITED - publisher, SOLICITED - subscriber
+        autils.assert_equal_strings(
+            bytes(discovery_event["data"][
+                aconsts.SESSION_CB_KEY_SERVICE_SPECIFIC_INFO]).decode("utf-8"),
+            p_config[aconsts.DISCOVERY_KEY_SSI],
+            "Discovery mismatch: service specific info (SSI)")
+        asserts.assert_equal(
+            autils.decode_list(discovery_event["data"][
+                aconsts.SESSION_CB_KEY_MATCH_FILTER_LIST]),
+            autils.decode_list(
+                p_config[aconsts.DISCOVERY_KEY_MATCH_FILTER_LIST]
+                if ptype == aconsts.PUBLISH_TYPE_UNSOLICITED else s_config[
+                    aconsts.DISCOVERY_KEY_MATCH_FILTER_LIST]),
+            "Discovery mismatch: match filter")
 
-    # Subscriber: send message to peer (Publisher)
-    s_dut.droid.wifiAwareSendMessage(s_disc_id, peer_id_on_sub,
-                                     self.get_next_msg_id(), self.query_msg,
-                                     self.msg_retx_count)
-    autils.wait_for_event(s_dut, aconsts.SESSION_CB_ON_MESSAGE_SENT)
+        # Subscriber: send message to peer (Publisher)
+        s_dut.droid.wifiAwareSendMessage(s_disc_id, peer_id_on_sub,
+                                         self.get_next_msg_id(),
+                                         self.query_msg, self.msg_retx_count)
+        autils.wait_for_event(s_dut, aconsts.SESSION_CB_ON_MESSAGE_SENT)
 
-    # Publisher: wait for received message
-    pub_rx_msg_event = autils.wait_for_event(
-        p_dut, aconsts.SESSION_CB_ON_MESSAGE_RECEIVED)
-    peer_id_on_pub = pub_rx_msg_event["data"][aconsts.SESSION_CB_KEY_PEER_ID]
+        # Publisher: wait for received message
+        pub_rx_msg_event = autils.wait_for_event(
+            p_dut, aconsts.SESSION_CB_ON_MESSAGE_RECEIVED)
+        peer_id_on_pub = pub_rx_msg_event["data"][
+            aconsts.SESSION_CB_KEY_PEER_ID]
 
-    # Publisher: validate contents of message
-    asserts.assert_equal(
-        pub_rx_msg_event["data"][aconsts.SESSION_CB_KEY_MESSAGE_AS_STRING],
-        self.query_msg, "Subscriber -> Publisher message corrupted")
+        # Publisher: validate contents of message
+        asserts.assert_equal(
+            pub_rx_msg_event["data"][aconsts.SESSION_CB_KEY_MESSAGE_AS_STRING],
+            self.query_msg, "Subscriber -> Publisher message corrupted")
 
-    # Publisher: send message to peer (Subscriber)
-    p_dut.droid.wifiAwareSendMessage(p_disc_id, peer_id_on_pub,
-                                     self.get_next_msg_id(), self.response_msg,
-                                     self.msg_retx_count)
-    autils.wait_for_event(p_dut, aconsts.SESSION_CB_ON_MESSAGE_SENT)
+        # Publisher: send message to peer (Subscriber)
+        p_dut.droid.wifiAwareSendMessage(p_disc_id, peer_id_on_pub,
+                                         self.get_next_msg_id(),
+                                         self.response_msg,
+                                         self.msg_retx_count)
+        autils.wait_for_event(p_dut, aconsts.SESSION_CB_ON_MESSAGE_SENT)
 
-    # Subscriber: wait for received message
-    sub_rx_msg_event = autils.wait_for_event(
-        s_dut, aconsts.SESSION_CB_ON_MESSAGE_RECEIVED)
+        # Subscriber: wait for received message
+        sub_rx_msg_event = autils.wait_for_event(
+            s_dut, aconsts.SESSION_CB_ON_MESSAGE_RECEIVED)
 
-    # Subscriber: validate contents of message
-    asserts.assert_equal(
-        sub_rx_msg_event["data"][aconsts.SESSION_CB_KEY_PEER_ID],
-        peer_id_on_sub,
-        "Subscriber received message from different peer ID then discovery!?")
-    autils.assert_equal_strings(
-        sub_rx_msg_event["data"][aconsts.SESSION_CB_KEY_MESSAGE_AS_STRING],
-        self.response_msg, "Publisher -> Subscriber message corrupted")
+        # Subscriber: validate contents of message
+        asserts.assert_equal(
+            sub_rx_msg_event["data"][aconsts.SESSION_CB_KEY_PEER_ID],
+            peer_id_on_sub,
+            "Subscriber received message from different peer ID then discovery!?"
+        )
+        autils.assert_equal_strings(
+            sub_rx_msg_event["data"][aconsts.SESSION_CB_KEY_MESSAGE_AS_STRING],
+            self.response_msg, "Publisher -> Subscriber message corrupted")
 
-    # Subscriber: validate that we're not getting another Service Discovery
-    autils.fail_on_event(s_dut, aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
+        # Subscriber: validate that we're not getting another Service Discovery
+        autils.fail_on_event(s_dut, aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
 
-    # Publisher: update publish and wait for confirmation
-    p_config[aconsts.DISCOVERY_KEY_SSI] = "something else"
-    p_dut.droid.wifiAwareUpdatePublish(p_disc_id, p_config)
-    autils.wait_for_event(p_dut, aconsts.SESSION_CB_ON_SESSION_CONFIG_UPDATED)
+        # Publisher: update publish and wait for confirmation
+        p_config[aconsts.DISCOVERY_KEY_SSI] = "something else"
+        p_dut.droid.wifiAwareUpdatePublish(p_disc_id, p_config)
+        autils.wait_for_event(p_dut,
+                              aconsts.SESSION_CB_ON_SESSION_CONFIG_UPDATED)
 
-    # Subscriber: expect a new service discovery
-    discovery_event = autils.wait_for_event(
-        s_dut, aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
+        # Subscriber: expect a new service discovery
+        discovery_event = autils.wait_for_event(
+            s_dut, aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
 
-    # Subscriber: validate contents of discovery
-    autils.assert_equal_strings(
-        bytes(discovery_event["data"][
-            aconsts.SESSION_CB_KEY_SERVICE_SPECIFIC_INFO]).decode("utf-8"),
-        p_config[aconsts.DISCOVERY_KEY_SSI],
-        "Discovery mismatch (after pub update): service specific info (SSI)")
-    asserts.assert_equal(
-        autils.decode_list(
-            discovery_event["data"][aconsts.SESSION_CB_KEY_MATCH_FILTER_LIST]),
-        autils.decode_list(p_config[aconsts.DISCOVERY_KEY_MATCH_FILTER_LIST]
-                           if ptype == aconsts.PUBLISH_TYPE_UNSOLICITED else
-                           s_config[aconsts.DISCOVERY_KEY_MATCH_FILTER_LIST]),
-        "Discovery mismatch: match filter")
+        # Subscriber: validate contents of discovery
+        autils.assert_equal_strings(
+            bytes(discovery_event["data"][
+                aconsts.SESSION_CB_KEY_SERVICE_SPECIFIC_INFO]).decode("utf-8"),
+            p_config[aconsts.DISCOVERY_KEY_SSI],
+            "Discovery mismatch (after pub update): service specific info (SSI)"
+        )
+        asserts.assert_equal(
+            autils.decode_list(discovery_event["data"][
+                aconsts.SESSION_CB_KEY_MATCH_FILTER_LIST]),
+            autils.decode_list(
+                p_config[aconsts.DISCOVERY_KEY_MATCH_FILTER_LIST]
+                if ptype == aconsts.PUBLISH_TYPE_UNSOLICITED else s_config[
+                    aconsts.DISCOVERY_KEY_MATCH_FILTER_LIST]),
+            "Discovery mismatch: match filter")
 
-    # Subscribe: update subscribe and wait for confirmation
-    s_config = self.create_subscribe_config(
-        s_dut.aware_capabilities,
-        stype,
-        payload_size,
-        ttl=0,
-        term_ind_on=False,
-        null_match=False)
-    s_dut.droid.wifiAwareUpdateSubscribe(s_disc_id, s_config)
-    autils.wait_for_event(s_dut, aconsts.SESSION_CB_ON_SESSION_CONFIG_UPDATED)
+        # Subscribe: update subscribe and wait for confirmation
+        s_config = self.create_subscribe_config(
+            s_dut.aware_capabilities,
+            stype,
+            payload_size,
+            ttl=0,
+            term_ind_on=False,
+            null_match=False)
+        s_dut.droid.wifiAwareUpdateSubscribe(s_disc_id, s_config)
+        autils.wait_for_event(s_dut,
+                              aconsts.SESSION_CB_ON_SESSION_CONFIG_UPDATED)
 
-    # Publisher+Subscriber: Terminate sessions
-    p_dut.droid.wifiAwareDestroyDiscoverySession(p_disc_id)
-    s_dut.droid.wifiAwareDestroyDiscoverySession(s_disc_id)
+        # Publisher+Subscriber: Terminate sessions
+        p_dut.droid.wifiAwareDestroyDiscoverySession(p_disc_id)
+        s_dut.droid.wifiAwareDestroyDiscoverySession(s_disc_id)
 
-    # sleep for timeout period and then verify all 'fail_on_event' together
-    time.sleep(autils.EVENT_TIMEOUT)
+        # sleep for timeout period and then verify all 'fail_on_event' together
+        time.sleep(autils.EVENT_TIMEOUT)
 
-    # verify that there were no other events
-    autils.verify_no_more_events(p_dut, timeout=0)
-    autils.verify_no_more_events(s_dut, timeout=0)
+        # verify that there were no other events
+        autils.verify_no_more_events(p_dut, timeout=0)
+        autils.verify_no_more_events(s_dut, timeout=0)
 
-    # verify that forbidden callbacks aren't called
-    autils.validate_forbidden_callbacks(p_dut, {aconsts.CB_EV_MATCH: 0})
+        # verify that forbidden callbacks aren't called
+        autils.validate_forbidden_callbacks(p_dut, {aconsts.CB_EV_MATCH: 0})
 
-  def verify_discovery_session_term(self, dut, disc_id, config, is_publish,
-                                    term_ind_on):
-    """Utility to verify that the specified discovery session has terminated (by
+    def verify_discovery_session_term(self, dut, disc_id, config, is_publish,
+                                      term_ind_on):
+        """Utility to verify that the specified discovery session has terminated (by
     waiting for the TTL and then attempting to reconfigure).
 
     Args:
@@ -293,41 +306,41 @@
       is_publish: True if the configuration was publish, False if subscribe
       term_ind_on: True if a termination indication is expected, False otherwise
     """
-    # Wait for session termination
-    if term_ind_on:
-      autils.wait_for_event(
-          dut,
-          autils.decorate_event(aconsts.SESSION_CB_ON_SESSION_TERMINATED,
-                                disc_id))
-    else:
-      # can't defer wait to end since in any case have to wait for session to
-      # expire
-      autils.fail_on_event(
-          dut,
-          autils.decorate_event(aconsts.SESSION_CB_ON_SESSION_TERMINATED,
-                                disc_id))
+        # Wait for session termination
+        if term_ind_on:
+            autils.wait_for_event(
+                dut,
+                autils.decorate_event(aconsts.SESSION_CB_ON_SESSION_TERMINATED,
+                                      disc_id))
+        else:
+            # can't defer wait to end since in any case have to wait for session to
+            # expire
+            autils.fail_on_event(
+                dut,
+                autils.decorate_event(aconsts.SESSION_CB_ON_SESSION_TERMINATED,
+                                      disc_id))
 
-    # Validate that session expired by trying to configure it (expect failure)
-    config[aconsts.DISCOVERY_KEY_SSI] = "something else"
-    if is_publish:
-      dut.droid.wifiAwareUpdatePublish(disc_id, config)
-    else:
-      dut.droid.wifiAwareUpdateSubscribe(disc_id, config)
+        # Validate that session expired by trying to configure it (expect failure)
+        config[aconsts.DISCOVERY_KEY_SSI] = "something else"
+        if is_publish:
+            dut.droid.wifiAwareUpdatePublish(disc_id, config)
+        else:
+            dut.droid.wifiAwareUpdateSubscribe(disc_id, config)
 
-    # The response to update discovery session is:
-    # term_ind_on=True: session was cleaned-up so won't get an explicit failure, but won't get a
-    #                   success either. Can check for no SESSION_CB_ON_SESSION_CONFIG_UPDATED but
-    #                   will defer to the end of the test (no events on queue).
-    # term_ind_on=False: session was not cleaned-up (yet). So expect
-    #                    SESSION_CB_ON_SESSION_CONFIG_FAILED.
-    if not term_ind_on:
-      autils.wait_for_event(
-          dut,
-          autils.decorate_event(aconsts.SESSION_CB_ON_SESSION_CONFIG_FAILED,
-                                disc_id))
+        # The response to update discovery session is:
+        # term_ind_on=True: session was cleaned-up so won't get an explicit failure, but won't get a
+        #                   success either. Can check for no SESSION_CB_ON_SESSION_CONFIG_UPDATED but
+        #                   will defer to the end of the test (no events on queue).
+        # term_ind_on=False: session was not cleaned-up (yet). So expect
+        #                    SESSION_CB_ON_SESSION_CONFIG_FAILED.
+        if not term_ind_on:
+            autils.wait_for_event(
+                dut,
+                autils.decorate_event(
+                    aconsts.SESSION_CB_ON_SESSION_CONFIG_FAILED, disc_id))
 
-  def positive_ttl_test_utility(self, is_publish, ptype, stype, term_ind_on):
-    """Utility which runs a positive discovery session TTL configuration test
+    def positive_ttl_test_utility(self, is_publish, ptype, stype, term_ind_on):
+        """Utility which runs a positive discovery session TTL configuration test
 
     Iteration 1: Verify session started with TTL
     Iteration 2: Verify session started without TTL and reconfigured with TTL
@@ -340,119 +353,123 @@
       stype: Subscribe discovery type (used if is_publish is False)
       term_ind_on: Configuration of termination indication
     """
-    SHORT_TTL = 5  # 5 seconds
-    LONG_TTL = 100  # 100 seconds
-    dut = self.android_devices[0]
+        SHORT_TTL = 5  # 5 seconds
+        LONG_TTL = 100  # 100 seconds
+        dut = self.android_devices[0]
 
-    # Attach and wait for confirmation
-    id = dut.droid.wifiAwareAttach(False)
-    autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
+        # Attach and wait for confirmation
+        id = dut.droid.wifiAwareAttach(False)
+        autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
 
-    # Iteration 1: Start discovery session with TTL
-    config = self.create_base_config(dut.aware_capabilities, is_publish, ptype,
-                                     stype, self.PAYLOAD_SIZE_TYPICAL,
-                                     SHORT_TTL, term_ind_on, False)
-    if is_publish:
-      disc_id = dut.droid.wifiAwarePublish(id, config, True)
-      autils.wait_for_event(dut,
-                            autils.decorate_event(
-                                aconsts.SESSION_CB_ON_PUBLISH_STARTED, disc_id))
-    else:
-      disc_id = dut.droid.wifiAwareSubscribe(id, config, True)
-      autils.wait_for_event(
-          dut,
-          autils.decorate_event(aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED,
-                                disc_id))
+        # Iteration 1: Start discovery session with TTL
+        config = self.create_base_config(
+            dut.aware_capabilities, is_publish, ptype, stype,
+            self.PAYLOAD_SIZE_TYPICAL, SHORT_TTL, term_ind_on, False)
+        if is_publish:
+            disc_id = dut.droid.wifiAwarePublish(id, config, True)
+            autils.wait_for_event(
+                dut,
+                autils.decorate_event(aconsts.SESSION_CB_ON_PUBLISH_STARTED,
+                                      disc_id))
+        else:
+            disc_id = dut.droid.wifiAwareSubscribe(id, config, True)
+            autils.wait_for_event(
+                dut,
+                autils.decorate_event(aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED,
+                                      disc_id))
 
-    # Wait for session termination & verify
-    self.verify_discovery_session_term(dut, disc_id, config, is_publish,
-                                       term_ind_on)
+        # Wait for session termination & verify
+        self.verify_discovery_session_term(dut, disc_id, config, is_publish,
+                                           term_ind_on)
 
-    # Iteration 2: Start a discovery session without TTL
-    config = self.create_base_config(dut.aware_capabilities, is_publish, ptype,
-                                     stype, self.PAYLOAD_SIZE_TYPICAL, 0,
-                                     term_ind_on, False)
-    if is_publish:
-      disc_id = dut.droid.wifiAwarePublish(id, config, True)
-      autils.wait_for_event(dut,
-                            autils.decorate_event(
-                                aconsts.SESSION_CB_ON_PUBLISH_STARTED, disc_id))
-    else:
-      disc_id = dut.droid.wifiAwareSubscribe(id, config, True)
-      autils.wait_for_event(
-          dut,
-          autils.decorate_event(aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED,
-                                disc_id))
+        # Iteration 2: Start a discovery session without TTL
+        config = self.create_base_config(
+            dut.aware_capabilities, is_publish, ptype, stype,
+            self.PAYLOAD_SIZE_TYPICAL, 0, term_ind_on, False)
+        if is_publish:
+            disc_id = dut.droid.wifiAwarePublish(id, config, True)
+            autils.wait_for_event(
+                dut,
+                autils.decorate_event(aconsts.SESSION_CB_ON_PUBLISH_STARTED,
+                                      disc_id))
+        else:
+            disc_id = dut.droid.wifiAwareSubscribe(id, config, True)
+            autils.wait_for_event(
+                dut,
+                autils.decorate_event(aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED,
+                                      disc_id))
 
-    # Update with a TTL
-    config = self.create_base_config(dut.aware_capabilities, is_publish, ptype,
-                                     stype, self.PAYLOAD_SIZE_TYPICAL,
-                                     SHORT_TTL, term_ind_on, False)
-    if is_publish:
-      dut.droid.wifiAwareUpdatePublish(disc_id, config)
-    else:
-      dut.droid.wifiAwareUpdateSubscribe(disc_id, config)
-    autils.wait_for_event(
-        dut,
-        autils.decorate_event(aconsts.SESSION_CB_ON_SESSION_CONFIG_UPDATED,
-                              disc_id))
+        # Update with a TTL
+        config = self.create_base_config(
+            dut.aware_capabilities, is_publish, ptype, stype,
+            self.PAYLOAD_SIZE_TYPICAL, SHORT_TTL, term_ind_on, False)
+        if is_publish:
+            dut.droid.wifiAwareUpdatePublish(disc_id, config)
+        else:
+            dut.droid.wifiAwareUpdateSubscribe(disc_id, config)
+        autils.wait_for_event(
+            dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SESSION_CONFIG_UPDATED,
+                                  disc_id))
 
-    # Wait for session termination & verify
-    self.verify_discovery_session_term(dut, disc_id, config, is_publish,
-                                       term_ind_on)
+        # Wait for session termination & verify
+        self.verify_discovery_session_term(dut, disc_id, config, is_publish,
+                                           term_ind_on)
 
-    # Iteration 3: Start a discovery session with (long) TTL
-    config = self.create_base_config(dut.aware_capabilities, is_publish, ptype,
-                                     stype, self.PAYLOAD_SIZE_TYPICAL, LONG_TTL,
-                                     term_ind_on, False)
-    if is_publish:
-      disc_id = dut.droid.wifiAwarePublish(id, config, True)
-      autils.wait_for_event(dut,
-                            autils.decorate_event(
-                                aconsts.SESSION_CB_ON_PUBLISH_STARTED, disc_id))
-    else:
-      disc_id = dut.droid.wifiAwareSubscribe(id, config, True)
-      autils.wait_for_event(
-          dut,
-          autils.decorate_event(aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED,
-                                disc_id))
+        # Iteration 3: Start a discovery session with (long) TTL
+        config = self.create_base_config(
+            dut.aware_capabilities, is_publish, ptype, stype,
+            self.PAYLOAD_SIZE_TYPICAL, LONG_TTL, term_ind_on, False)
+        if is_publish:
+            disc_id = dut.droid.wifiAwarePublish(id, config, True)
+            autils.wait_for_event(
+                dut,
+                autils.decorate_event(aconsts.SESSION_CB_ON_PUBLISH_STARTED,
+                                      disc_id))
+        else:
+            disc_id = dut.droid.wifiAwareSubscribe(id, config, True)
+            autils.wait_for_event(
+                dut,
+                autils.decorate_event(aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED,
+                                      disc_id))
 
-    # Update with a TTL
-    config = self.create_base_config(dut.aware_capabilities, is_publish, ptype,
-                                     stype, self.PAYLOAD_SIZE_TYPICAL,
-                                     SHORT_TTL, term_ind_on, False)
-    if is_publish:
-      dut.droid.wifiAwareUpdatePublish(disc_id, config)
-    else:
-      dut.droid.wifiAwareUpdateSubscribe(disc_id, config)
-    autils.wait_for_event(
-        dut,
-        autils.decorate_event(aconsts.SESSION_CB_ON_SESSION_CONFIG_UPDATED,
-                              disc_id))
+        # Update with a TTL
+        config = self.create_base_config(
+            dut.aware_capabilities, is_publish, ptype, stype,
+            self.PAYLOAD_SIZE_TYPICAL, SHORT_TTL, term_ind_on, False)
+        if is_publish:
+            dut.droid.wifiAwareUpdatePublish(disc_id, config)
+        else:
+            dut.droid.wifiAwareUpdateSubscribe(disc_id, config)
+        autils.wait_for_event(
+            dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SESSION_CONFIG_UPDATED,
+                                  disc_id))
 
-    # Wait for session termination & verify
-    self.verify_discovery_session_term(dut, disc_id, config, is_publish,
-                                       term_ind_on)
+        # Wait for session termination & verify
+        self.verify_discovery_session_term(dut, disc_id, config, is_publish,
+                                           term_ind_on)
 
-    # verify that there were no other events
-    autils.verify_no_more_events(dut)
+        # verify that there were no other events
+        autils.verify_no_more_events(dut)
 
-    # verify that forbidden callbacks aren't called
-    if not term_ind_on:
-      autils.validate_forbidden_callbacks(dut, {
-          aconsts.CB_EV_PUBLISH_TERMINATED: 0,
-          aconsts.CB_EV_SUBSCRIBE_TERMINATED: 0
-      })
+        # verify that forbidden callbacks aren't called
+        if not term_ind_on:
+            autils.validate_forbidden_callbacks(
+                dut, {
+                    aconsts.CB_EV_PUBLISH_TERMINATED: 0,
+                    aconsts.CB_EV_SUBSCRIBE_TERMINATED: 0
+                })
 
-  def discovery_mismatch_test_utility(self,
-                                      is_expected_to_pass,
-                                      p_type,
-                                      s_type,
-                                      p_service_name=None,
-                                      s_service_name=None,
-                                      p_mf_1=None,
-                                      s_mf_1=None):
-    """Utility which runs the negative discovery test for mismatched service
+    def discovery_mismatch_test_utility(self,
+                                        is_expected_to_pass,
+                                        p_type,
+                                        s_type,
+                                        p_service_name=None,
+                                        s_service_name=None,
+                                        p_mf_1=None,
+                                        s_mf_1=None):
+        """Utility which runs the negative discovery test for mismatched service
     configs.
 
     Args:
@@ -464,378 +481,379 @@
       p_mf_1: Publish match filter element [1] (or None to leave unchanged)
       s_mf_1: Subscribe match filter element [1] (or None to leave unchanged)
     """
-    p_dut = self.android_devices[0]
-    p_dut.pretty_name = "Publisher"
-    s_dut = self.android_devices[1]
-    s_dut.pretty_name = "Subscriber"
+        p_dut = self.android_devices[0]
+        p_dut.pretty_name = "Publisher"
+        s_dut = self.android_devices[1]
+        s_dut.pretty_name = "Subscriber"
 
-    # create configurations
-    p_config = self.create_publish_config(
-        p_dut.aware_capabilities,
-        p_type,
-        self.PAYLOAD_SIZE_TYPICAL,
-        ttl=0,
-        term_ind_on=False,
-        null_match=False)
-    if p_service_name is not None:
-      p_config[aconsts.DISCOVERY_KEY_SERVICE_NAME] = p_service_name
-    if p_mf_1 is not None:
-      p_config[aconsts.DISCOVERY_KEY_MATCH_FILTER_LIST] = autils.encode_list(
-          [(10).to_bytes(1, byteorder="big"),
-           p_mf_1,
-           bytes(range(40))])
-    s_config = self.create_publish_config(
-        s_dut.aware_capabilities,
-        s_type,
-        self.PAYLOAD_SIZE_TYPICAL,
-        ttl=0,
-        term_ind_on=False,
-        null_match=False)
-    if s_service_name is not None:
-      s_config[aconsts.DISCOVERY_KEY_SERVICE_NAME] = s_service_name
-    if s_mf_1 is not None:
-      s_config[aconsts.DISCOVERY_KEY_MATCH_FILTER_LIST] = autils.encode_list(
-          [(10).to_bytes(1, byteorder="big"),
-           s_mf_1,
-           bytes(range(40))])
+        # create configurations
+        p_config = self.create_publish_config(
+            p_dut.aware_capabilities,
+            p_type,
+            self.PAYLOAD_SIZE_TYPICAL,
+            ttl=0,
+            term_ind_on=False,
+            null_match=False)
+        if p_service_name is not None:
+            p_config[aconsts.DISCOVERY_KEY_SERVICE_NAME] = p_service_name
+        if p_mf_1 is not None:
+            p_config[
+                aconsts.DISCOVERY_KEY_MATCH_FILTER_LIST] = autils.encode_list(
+                    [(10).to_bytes(1, byteorder="big"), p_mf_1,
+                     bytes(range(40))])
+        s_config = self.create_publish_config(
+            s_dut.aware_capabilities,
+            s_type,
+            self.PAYLOAD_SIZE_TYPICAL,
+            ttl=0,
+            term_ind_on=False,
+            null_match=False)
+        if s_service_name is not None:
+            s_config[aconsts.DISCOVERY_KEY_SERVICE_NAME] = s_service_name
+        if s_mf_1 is not None:
+            s_config[
+                aconsts.DISCOVERY_KEY_MATCH_FILTER_LIST] = autils.encode_list(
+                    [(10).to_bytes(1, byteorder="big"), s_mf_1,
+                     bytes(range(40))])
 
-    p_id = p_dut.droid.wifiAwareAttach(False)
-    autils.wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
-    time.sleep(self.device_startup_offset)
-    s_id = s_dut.droid.wifiAwareAttach(False)
-    autils.wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
+        p_id = p_dut.droid.wifiAwareAttach(False)
+        autils.wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
+        time.sleep(self.device_startup_offset)
+        s_id = s_dut.droid.wifiAwareAttach(False)
+        autils.wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
 
-    # Publisher: start publish and wait for confirmation
-    p_disc_id = p_dut.droid.wifiAwarePublish(p_id, p_config)
-    autils.wait_for_event(p_dut, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
+        # Publisher: start publish and wait for confirmation
+        p_disc_id = p_dut.droid.wifiAwarePublish(p_id, p_config)
+        autils.wait_for_event(p_dut, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
 
-    # Subscriber: start subscribe and wait for confirmation
-    s_disc_id = s_dut.droid.wifiAwareSubscribe(s_id, s_config)
-    autils.wait_for_event(s_dut, aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED)
+        # Subscriber: start subscribe and wait for confirmation
+        s_disc_id = s_dut.droid.wifiAwareSubscribe(s_id, s_config)
+        autils.wait_for_event(s_dut, aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED)
 
-    # Subscriber: fail on service discovery
-    if is_expected_to_pass:
-      autils.wait_for_event(s_dut, aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
-    else:
-      autils.fail_on_event(s_dut, aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
+        # Subscriber: fail on service discovery
+        if is_expected_to_pass:
+            autils.wait_for_event(s_dut,
+                                  aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
+        else:
+            autils.fail_on_event(s_dut,
+                                 aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
 
-    # Publisher+Subscriber: Terminate sessions
-    p_dut.droid.wifiAwareDestroyDiscoverySession(p_disc_id)
-    s_dut.droid.wifiAwareDestroyDiscoverySession(s_disc_id)
+        # Publisher+Subscriber: Terminate sessions
+        p_dut.droid.wifiAwareDestroyDiscoverySession(p_disc_id)
+        s_dut.droid.wifiAwareDestroyDiscoverySession(s_disc_id)
 
-    # verify that there were no other events (including terminations)
-    time.sleep(autils.EVENT_TIMEOUT)
-    autils.verify_no_more_events(p_dut, timeout=0)
-    autils.verify_no_more_events(s_dut, timeout=0)
+        # verify that there were no other events (including terminations)
+        time.sleep(autils.EVENT_TIMEOUT)
+        autils.verify_no_more_events(p_dut, timeout=0)
+        autils.verify_no_more_events(s_dut, timeout=0)
 
+    #######################################
+    # Positive tests key:
+    #
+    # names is: test_<pub_type>_<sub_type>_<size>
+    # where:
+    #
+    # pub_type: Type of publish discovery session: unsolicited or solicited.
+    # sub_type: Type of subscribe discovery session: passive or active.
+    # size: Size of payload fields (service name, service specific info, and match
+    # filter: typical, max, or min.
+    #######################################
 
-  #######################################
-  # Positive tests key:
-  #
-  # names is: test_<pub_type>_<sub_type>_<size>
-  # where:
-  #
-  # pub_type: Type of publish discovery session: unsolicited or solicited.
-  # sub_type: Type of subscribe discovery session: passive or active.
-  # size: Size of payload fields (service name, service specific info, and match
-  # filter: typical, max, or min.
-  #######################################
-
-  @test_tracker_info(uuid="954ebbde-ed2b-4f04-9e68-88239187d69d")
-  def test_positive_unsolicited_passive_typical(self):
-    """Functional test case / Discovery test cases / positive test case:
+    @test_tracker_info(uuid="954ebbde-ed2b-4f04-9e68-88239187d69d")
+    def test_positive_unsolicited_passive_typical(self):
+        """Functional test case / Discovery test cases / positive test case:
     - Solicited publish + passive subscribe
     - Typical payload fields size
 
     Verifies that discovery and message exchange succeeds.
     """
-    self.positive_discovery_test_utility(
-        ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
-        stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
-        payload_size=self.PAYLOAD_SIZE_TYPICAL)
+        self.positive_discovery_test_utility(
+            ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
+            stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
+            payload_size=self.PAYLOAD_SIZE_TYPICAL)
 
-  @test_tracker_info(uuid="67fb22bb-6985-4345-95a4-90b76681a58b")
-  def test_positive_unsolicited_passive_min(self):
-    """Functional test case / Discovery test cases / positive test case:
+    @test_tracker_info(uuid="67fb22bb-6985-4345-95a4-90b76681a58b")
+    def test_positive_unsolicited_passive_min(self):
+        """Functional test case / Discovery test cases / positive test case:
     - Solicited publish + passive subscribe
     - Minimal payload fields size
 
     Verifies that discovery and message exchange succeeds.
     """
-    self.positive_discovery_test_utility(
-        ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
-        stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
-        payload_size=self.PAYLOAD_SIZE_MIN)
+        self.positive_discovery_test_utility(
+            ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
+            stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
+            payload_size=self.PAYLOAD_SIZE_MIN)
 
-  @test_tracker_info(uuid="a02a47b9-41bb-47bb-883b-921024a2c30d")
-  def test_positive_unsolicited_passive_max(self):
-    """Functional test case / Discovery test cases / positive test case:
+    @test_tracker_info(uuid="a02a47b9-41bb-47bb-883b-921024a2c30d")
+    def test_positive_unsolicited_passive_max(self):
+        """Functional test case / Discovery test cases / positive test case:
     - Solicited publish + passive subscribe
     - Maximal payload fields size
 
     Verifies that discovery and message exchange succeeds.
     """
-    self.positive_discovery_test_utility(
-        ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
-        stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
-        payload_size=self.PAYLOAD_SIZE_MAX)
+        self.positive_discovery_test_utility(
+            ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
+            stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
+            payload_size=self.PAYLOAD_SIZE_MAX)
 
-  @test_tracker_info(uuid="586c657f-2388-4e7a-baee-9bce2f3d1a16")
-  def test_positive_solicited_active_typical(self):
-    """Functional test case / Discovery test cases / positive test case:
+    @test_tracker_info(uuid="586c657f-2388-4e7a-baee-9bce2f3d1a16")
+    def test_positive_solicited_active_typical(self):
+        """Functional test case / Discovery test cases / positive test case:
     - Unsolicited publish + active subscribe
     - Typical payload fields size
 
     Verifies that discovery and message exchange succeeds.
     """
-    self.positive_discovery_test_utility(
-        ptype=aconsts.PUBLISH_TYPE_SOLICITED,
-        stype=aconsts.SUBSCRIBE_TYPE_ACTIVE,
-        payload_size=self.PAYLOAD_SIZE_TYPICAL)
+        self.positive_discovery_test_utility(
+            ptype=aconsts.PUBLISH_TYPE_SOLICITED,
+            stype=aconsts.SUBSCRIBE_TYPE_ACTIVE,
+            payload_size=self.PAYLOAD_SIZE_TYPICAL)
 
-  @test_tracker_info(uuid="5369e4ff-f406-48c5-b41a-df38ec340146")
-  def test_positive_solicited_active_min(self):
-    """Functional test case / Discovery test cases / positive test case:
+    @test_tracker_info(uuid="5369e4ff-f406-48c5-b41a-df38ec340146")
+    def test_positive_solicited_active_min(self):
+        """Functional test case / Discovery test cases / positive test case:
     - Unsolicited publish + active subscribe
     - Minimal payload fields size
 
     Verifies that discovery and message exchange succeeds.
     """
-    self.positive_discovery_test_utility(
-        ptype=aconsts.PUBLISH_TYPE_SOLICITED,
-        stype=aconsts.SUBSCRIBE_TYPE_ACTIVE,
-        payload_size=self.PAYLOAD_SIZE_MIN)
+        self.positive_discovery_test_utility(
+            ptype=aconsts.PUBLISH_TYPE_SOLICITED,
+            stype=aconsts.SUBSCRIBE_TYPE_ACTIVE,
+            payload_size=self.PAYLOAD_SIZE_MIN)
 
-  @test_tracker_info(uuid="634c6eb8-2c4f-42bd-9bbb-d874d0ec22f3")
-  def test_positive_solicited_active_max(self):
-    """Functional test case / Discovery test cases / positive test case:
+    @test_tracker_info(uuid="634c6eb8-2c4f-42bd-9bbb-d874d0ec22f3")
+    def test_positive_solicited_active_max(self):
+        """Functional test case / Discovery test cases / positive test case:
     - Unsolicited publish + active subscribe
     - Maximal payload fields size
 
     Verifies that discovery and message exchange succeeds.
     """
-    self.positive_discovery_test_utility(
-        ptype=aconsts.PUBLISH_TYPE_SOLICITED,
-        stype=aconsts.SUBSCRIBE_TYPE_ACTIVE,
-        payload_size=self.PAYLOAD_SIZE_MAX)
+        self.positive_discovery_test_utility(
+            ptype=aconsts.PUBLISH_TYPE_SOLICITED,
+            stype=aconsts.SUBSCRIBE_TYPE_ACTIVE,
+            payload_size=self.PAYLOAD_SIZE_MAX)
 
-  #######################################
-  # TTL tests key:
-  #
-  # names is: test_ttl_<pub_type|sub_type>_<term_ind>
-  # where:
-  #
-  # pub_type: Type of publish discovery session: unsolicited or solicited.
-  # sub_type: Type of subscribe discovery session: passive or active.
-  # term_ind: ind_on or ind_off
-  #######################################
+    #######################################
+    # TTL tests key:
+    #
+    # names is: test_ttl_<pub_type|sub_type>_<term_ind>
+    # where:
+    #
+    # pub_type: Type of publish discovery session: unsolicited or solicited.
+    # sub_type: Type of subscribe discovery session: passive or active.
+    # term_ind: ind_on or ind_off
+    #######################################
 
-  @test_tracker_info(uuid="9d7e758e-e0e2-4550-bcee-bfb6a2bff63e")
-  def test_ttl_unsolicited_ind_on(self):
-    """Functional test case / Discovery test cases / TTL test case:
+    @test_tracker_info(uuid="9d7e758e-e0e2-4550-bcee-bfb6a2bff63e")
+    def test_ttl_unsolicited_ind_on(self):
+        """Functional test case / Discovery test cases / TTL test case:
     - Unsolicited publish
     - Termination indication enabled
     """
-    self.positive_ttl_test_utility(
-        is_publish=True,
-        ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
-        stype=None,
-        term_ind_on=True)
+        self.positive_ttl_test_utility(
+            is_publish=True,
+            ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
+            stype=None,
+            term_ind_on=True)
 
-  @test_tracker_info(uuid="48fd69bc-cc2a-4f65-a0a1-63d7c1720702")
-  def test_ttl_unsolicited_ind_off(self):
-    """Functional test case / Discovery test cases / TTL test case:
+    @test_tracker_info(uuid="48fd69bc-cc2a-4f65-a0a1-63d7c1720702")
+    def test_ttl_unsolicited_ind_off(self):
+        """Functional test case / Discovery test cases / TTL test case:
     - Unsolicited publish
     - Termination indication disabled
     """
-    self.positive_ttl_test_utility(
-        is_publish=True,
-        ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
-        stype=None,
-        term_ind_on=False)
+        self.positive_ttl_test_utility(
+            is_publish=True,
+            ptype=aconsts.PUBLISH_TYPE_UNSOLICITED,
+            stype=None,
+            term_ind_on=False)
 
-  @test_tracker_info(uuid="afb75fc1-9ba7-446a-b5ed-7cd37ab51b1c")
-  def test_ttl_solicited_ind_on(self):
-    """Functional test case / Discovery test cases / TTL test case:
+    @test_tracker_info(uuid="afb75fc1-9ba7-446a-b5ed-7cd37ab51b1c")
+    def test_ttl_solicited_ind_on(self):
+        """Functional test case / Discovery test cases / TTL test case:
     - Solicited publish
     - Termination indication enabled
     """
-    self.positive_ttl_test_utility(
-        is_publish=True,
-        ptype=aconsts.PUBLISH_TYPE_SOLICITED,
-        stype=None,
-        term_ind_on=True)
+        self.positive_ttl_test_utility(
+            is_publish=True,
+            ptype=aconsts.PUBLISH_TYPE_SOLICITED,
+            stype=None,
+            term_ind_on=True)
 
-  @test_tracker_info(uuid="703311a6-e444-4055-94ee-ea9b9b71799e")
-  def test_ttl_solicited_ind_off(self):
-    """Functional test case / Discovery test cases / TTL test case:
+    @test_tracker_info(uuid="703311a6-e444-4055-94ee-ea9b9b71799e")
+    def test_ttl_solicited_ind_off(self):
+        """Functional test case / Discovery test cases / TTL test case:
     - Solicited publish
     - Termination indication disabled
     """
-    self.positive_ttl_test_utility(
-        is_publish=True,
-        ptype=aconsts.PUBLISH_TYPE_SOLICITED,
-        stype=None,
-        term_ind_on=False)
+        self.positive_ttl_test_utility(
+            is_publish=True,
+            ptype=aconsts.PUBLISH_TYPE_SOLICITED,
+            stype=None,
+            term_ind_on=False)
 
-  @test_tracker_info(uuid="38a541c4-ff55-4387-87b7-4d940489da9d")
-  def test_ttl_passive_ind_on(self):
-    """Functional test case / Discovery test cases / TTL test case:
+    @test_tracker_info(uuid="38a541c4-ff55-4387-87b7-4d940489da9d")
+    def test_ttl_passive_ind_on(self):
+        """Functional test case / Discovery test cases / TTL test case:
     - Passive subscribe
     - Termination indication enabled
     """
-    self.positive_ttl_test_utility(
-        is_publish=False,
-        ptype=None,
-        stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
-        term_ind_on=True)
+        self.positive_ttl_test_utility(
+            is_publish=False,
+            ptype=None,
+            stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
+            term_ind_on=True)
 
-  @test_tracker_info(uuid="ba971e12-b0ca-417c-a1b5-9451598de47d")
-  def test_ttl_passive_ind_off(self):
-    """Functional test case / Discovery test cases / TTL test case:
+    @test_tracker_info(uuid="ba971e12-b0ca-417c-a1b5-9451598de47d")
+    def test_ttl_passive_ind_off(self):
+        """Functional test case / Discovery test cases / TTL test case:
     - Passive subscribe
     - Termination indication disabled
     """
-    self.positive_ttl_test_utility(
-        is_publish=False,
-        ptype=None,
-        stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
-        term_ind_on=False)
+        self.positive_ttl_test_utility(
+            is_publish=False,
+            ptype=None,
+            stype=aconsts.SUBSCRIBE_TYPE_PASSIVE,
+            term_ind_on=False)
 
-  @test_tracker_info(uuid="7b5d96f2-2415-4b98-9a51-32957f0679a0")
-  def test_ttl_active_ind_on(self):
-    """Functional test case / Discovery test cases / TTL test case:
+    @test_tracker_info(uuid="7b5d96f2-2415-4b98-9a51-32957f0679a0")
+    def test_ttl_active_ind_on(self):
+        """Functional test case / Discovery test cases / TTL test case:
     - Active subscribe
     - Termination indication enabled
     """
-    self.positive_ttl_test_utility(
-        is_publish=False,
-        ptype=None,
-        stype=aconsts.SUBSCRIBE_TYPE_ACTIVE,
-        term_ind_on=True)
+        self.positive_ttl_test_utility(
+            is_publish=False,
+            ptype=None,
+            stype=aconsts.SUBSCRIBE_TYPE_ACTIVE,
+            term_ind_on=True)
 
-  @test_tracker_info(uuid="c9268eca-0a30-42dd-8e6c-b8b0b84697fb")
-  def test_ttl_active_ind_off(self):
-    """Functional test case / Discovery test cases / TTL test case:
+    @test_tracker_info(uuid="c9268eca-0a30-42dd-8e6c-b8b0b84697fb")
+    def test_ttl_active_ind_off(self):
+        """Functional test case / Discovery test cases / TTL test case:
     - Active subscribe
     - Termination indication disabled
     """
-    self.positive_ttl_test_utility(
-        is_publish=False,
-        ptype=None,
-        stype=aconsts.SUBSCRIBE_TYPE_ACTIVE,
-        term_ind_on=False)
+        self.positive_ttl_test_utility(
+            is_publish=False,
+            ptype=None,
+            stype=aconsts.SUBSCRIBE_TYPE_ACTIVE,
+            term_ind_on=False)
 
-  #######################################
-  # Mismatched service name tests key:
-  #
-  # names is: test_mismatch_service_name_<pub_type>_<sub_type>
-  # where:
-  #
-  # pub_type: Type of publish discovery session: unsolicited or solicited.
-  # sub_type: Type of subscribe discovery session: passive or active.
-  #######################################
+    #######################################
+    # Mismatched service name tests key:
+    #
+    # names is: test_mismatch_service_name_<pub_type>_<sub_type>
+    # where:
+    #
+    # pub_type: Type of publish discovery session: unsolicited or solicited.
+    # sub_type: Type of subscribe discovery session: passive or active.
+    #######################################
 
-  @test_tracker_info(uuid="175415e9-7d07-40d0-95f0-3a5f91ea4711")
-  def test_mismatch_service_name_unsolicited_passive(self):
-    """Functional test case / Discovery test cases / Mismatch service name
+    @test_tracker_info(uuid="175415e9-7d07-40d0-95f0-3a5f91ea4711")
+    def test_mismatch_service_name_unsolicited_passive(self):
+        """Functional test case / Discovery test cases / Mismatch service name
     - Unsolicited publish
     - Passive subscribe
     """
-    self.discovery_mismatch_test_utility(
-        is_expected_to_pass=False,
-        p_type=aconsts.PUBLISH_TYPE_UNSOLICITED,
-        s_type=aconsts.SUBSCRIBE_TYPE_PASSIVE,
-        p_service_name="GoogleTestServiceXXX",
-        s_service_name="GoogleTestServiceYYY")
+        self.discovery_mismatch_test_utility(
+            is_expected_to_pass=False,
+            p_type=aconsts.PUBLISH_TYPE_UNSOLICITED,
+            s_type=aconsts.SUBSCRIBE_TYPE_PASSIVE,
+            p_service_name="GoogleTestServiceXXX",
+            s_service_name="GoogleTestServiceYYY")
 
-  @test_tracker_info(uuid="c22a54ce-9e46-47a5-ac44-831faf93d317")
-  def test_mismatch_service_name_solicited_active(self):
-    """Functional test case / Discovery test cases / Mismatch service name
+    @test_tracker_info(uuid="c22a54ce-9e46-47a5-ac44-831faf93d317")
+    def test_mismatch_service_name_solicited_active(self):
+        """Functional test case / Discovery test cases / Mismatch service name
     - Solicited publish
     - Active subscribe
     """
-    self.discovery_mismatch_test_utility(
-        is_expected_to_pass=False,
-        p_type=aconsts.PUBLISH_TYPE_SOLICITED,
-        s_type=aconsts.SUBSCRIBE_TYPE_ACTIVE,
-        p_service_name="GoogleTestServiceXXX",
-        s_service_name="GoogleTestServiceYYY")
+        self.discovery_mismatch_test_utility(
+            is_expected_to_pass=False,
+            p_type=aconsts.PUBLISH_TYPE_SOLICITED,
+            s_type=aconsts.SUBSCRIBE_TYPE_ACTIVE,
+            p_service_name="GoogleTestServiceXXX",
+            s_service_name="GoogleTestServiceYYY")
 
-  #######################################
-  # Mismatched discovery session type tests key:
-  #
-  # names is: test_mismatch_service_type_<pub_type>_<sub_type>
-  # where:
-  #
-  # pub_type: Type of publish discovery session: unsolicited or solicited.
-  # sub_type: Type of subscribe discovery session: passive or active.
-  #######################################
+    #######################################
+    # Mismatched discovery session type tests key:
+    #
+    # names is: test_mismatch_service_type_<pub_type>_<sub_type>
+    # where:
+    #
+    # pub_type: Type of publish discovery session: unsolicited or solicited.
+    # sub_type: Type of subscribe discovery session: passive or active.
+    #######################################
 
-  @test_tracker_info(uuid="4806f631-d9eb-45fd-9e75-24674962770f")
-  def test_mismatch_service_type_unsolicited_active(self):
-    """Functional test case / Discovery test cases / Mismatch service name
+    @test_tracker_info(uuid="4806f631-d9eb-45fd-9e75-24674962770f")
+    def test_mismatch_service_type_unsolicited_active(self):
+        """Functional test case / Discovery test cases / Mismatch service name
     - Unsolicited publish
     - Active subscribe
     """
-    self.discovery_mismatch_test_utility(
-        is_expected_to_pass=True,
-        p_type=aconsts.PUBLISH_TYPE_UNSOLICITED,
-        s_type=aconsts.SUBSCRIBE_TYPE_ACTIVE)
+        self.discovery_mismatch_test_utility(
+            is_expected_to_pass=True,
+            p_type=aconsts.PUBLISH_TYPE_UNSOLICITED,
+            s_type=aconsts.SUBSCRIBE_TYPE_ACTIVE)
 
-  @test_tracker_info(uuid="12d648fd-b8fa-4c0f-9467-95e2366047de")
-  def test_mismatch_service_type_solicited_passive(self):
-    """Functional test case / Discovery test cases / Mismatch service name
+    @test_tracker_info(uuid="12d648fd-b8fa-4c0f-9467-95e2366047de")
+    def test_mismatch_service_type_solicited_passive(self):
+        """Functional test case / Discovery test cases / Mismatch service name
     - Unsolicited publish
     - Active subscribe
     """
-    self.discovery_mismatch_test_utility(
-        is_expected_to_pass=False,
-        p_type=aconsts.PUBLISH_TYPE_SOLICITED,
-        s_type=aconsts.SUBSCRIBE_TYPE_PASSIVE)
+        self.discovery_mismatch_test_utility(
+            is_expected_to_pass=False,
+            p_type=aconsts.PUBLISH_TYPE_SOLICITED,
+            s_type=aconsts.SUBSCRIBE_TYPE_PASSIVE)
 
-  #######################################
-  # Mismatched discovery match filter tests key:
-  #
-  # names is: test_mismatch_match_filter_<pub_type>_<sub_type>
-  # where:
-  #
-  # pub_type: Type of publish discovery session: unsolicited or solicited.
-  # sub_type: Type of subscribe discovery session: passive or active.
-  #######################################
+    #######################################
+    # Mismatched discovery match filter tests key:
+    #
+    # names is: test_mismatch_match_filter_<pub_type>_<sub_type>
+    # where:
+    #
+    # pub_type: Type of publish discovery session: unsolicited or solicited.
+    # sub_type: Type of subscribe discovery session: passive or active.
+    #######################################
 
-  @test_tracker_info(uuid="d98454cb-64af-4266-8fed-f0b545a2d7c4")
-  def test_mismatch_match_filter_unsolicited_passive(self):
-    """Functional test case / Discovery test cases / Mismatch match filter
+    @test_tracker_info(uuid="d98454cb-64af-4266-8fed-f0b545a2d7c4")
+    def test_mismatch_match_filter_unsolicited_passive(self):
+        """Functional test case / Discovery test cases / Mismatch match filter
     - Unsolicited publish
     - Passive subscribe
     """
-    self.discovery_mismatch_test_utility(
-        is_expected_to_pass=False,
-        p_type=aconsts.PUBLISH_TYPE_UNSOLICITED,
-        s_type=aconsts.SUBSCRIBE_TYPE_PASSIVE,
-        p_mf_1="hello there string",
-        s_mf_1="goodbye there string")
+        self.discovery_mismatch_test_utility(
+            is_expected_to_pass=False,
+            p_type=aconsts.PUBLISH_TYPE_UNSOLICITED,
+            s_type=aconsts.SUBSCRIBE_TYPE_PASSIVE,
+            p_mf_1="hello there string",
+            s_mf_1="goodbye there string")
 
-  @test_tracker_info(uuid="663c1008-ae11-4e1a-87c7-c311d83f481c")
-  def test_mismatch_match_filter_solicited_active(self):
-    """Functional test case / Discovery test cases / Mismatch match filter
+    @test_tracker_info(uuid="663c1008-ae11-4e1a-87c7-c311d83f481c")
+    def test_mismatch_match_filter_solicited_active(self):
+        """Functional test case / Discovery test cases / Mismatch match filter
     - Solicited publish
     - Active subscribe
     """
-    self.discovery_mismatch_test_utility(
-        is_expected_to_pass=False,
-        p_type=aconsts.PUBLISH_TYPE_SOLICITED,
-        s_type=aconsts.SUBSCRIBE_TYPE_ACTIVE,
-        p_mf_1="hello there string",
-        s_mf_1="goodbye there string")
+        self.discovery_mismatch_test_utility(
+            is_expected_to_pass=False,
+            p_type=aconsts.PUBLISH_TYPE_SOLICITED,
+            s_type=aconsts.SUBSCRIBE_TYPE_ACTIVE,
+            p_mf_1="hello there string",
+            s_mf_1="goodbye there string")
 
-  #######################################
-  # Multiple concurrent services
-  #######################################
+    #######################################
+    # Multiple concurrent services
+    #######################################
 
-  def run_multiple_concurrent_services(self, type_x, type_y):
-    """Validate multiple identical discovery services running on both devices:
+    def run_multiple_concurrent_services(self, type_x, type_y):
+        """Validate multiple identical discovery services running on both devices:
     - DUT1 & DUT2 running Publish for X
     - DUT1 & DUT2 running Publish for Y
     - DUT1 Subscribes for X
@@ -849,117 +867,118 @@
       type_x, type_y: A list of [ptype, stype] of the publish and subscribe
                       types for services X and Y respectively.
     """
-    dut1 = self.android_devices[0]
-    dut2 = self.android_devices[1]
+        dut1 = self.android_devices[0]
+        dut2 = self.android_devices[1]
 
-    X_SERVICE_NAME = "ServiceXXX"
-    Y_SERVICE_NAME = "ServiceYYY"
+        X_SERVICE_NAME = "ServiceXXX"
+        Y_SERVICE_NAME = "ServiceYYY"
 
-    asserts.skip_if(dut1.aware_capabilities[aconsts.CAP_MAX_PUBLISHES] < 2 or
-                    dut2.aware_capabilities[aconsts.CAP_MAX_PUBLISHES] < 2,
-                    "Devices do not support 2 publish sessions")
+        asserts.skip_if(
+            dut1.aware_capabilities[aconsts.CAP_MAX_PUBLISHES] < 2
+            or dut2.aware_capabilities[aconsts.CAP_MAX_PUBLISHES] < 2,
+            "Devices do not support 2 publish sessions")
 
-    # attach and wait for confirmation
-    id1 = dut1.droid.wifiAwareAttach(False)
-    autils.wait_for_event(dut1, aconsts.EVENT_CB_ON_ATTACHED)
-    time.sleep(self.device_startup_offset)
-    id2 = dut2.droid.wifiAwareAttach(False)
-    autils.wait_for_event(dut2, aconsts.EVENT_CB_ON_ATTACHED)
+        # attach and wait for confirmation
+        id1 = dut1.droid.wifiAwareAttach(False)
+        autils.wait_for_event(dut1, aconsts.EVENT_CB_ON_ATTACHED)
+        time.sleep(self.device_startup_offset)
+        id2 = dut2.droid.wifiAwareAttach(False)
+        autils.wait_for_event(dut2, aconsts.EVENT_CB_ON_ATTACHED)
 
-    # DUT1 & DUT2: start publishing both X & Y services and wait for
-    # confirmations
-    dut1_x_pid = dut1.droid.wifiAwarePublish(id1,
-                                             autils.create_discovery_config(
-                                               X_SERVICE_NAME, type_x[0]))
-    event = autils.wait_for_event(dut1, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
-    asserts.assert_equal(event["data"][aconsts.SESSION_CB_KEY_SESSION_ID],
-                         dut1_x_pid,
-                         "Unexpected DUT1 X publish session discovery ID")
+        # DUT1 & DUT2: start publishing both X & Y services and wait for
+        # confirmations
+        dut1_x_pid = dut1.droid.wifiAwarePublish(
+            id1, autils.create_discovery_config(X_SERVICE_NAME, type_x[0]))
+        event = autils.wait_for_event(dut1,
+                                      aconsts.SESSION_CB_ON_PUBLISH_STARTED)
+        asserts.assert_equal(event["data"][aconsts.SESSION_CB_KEY_SESSION_ID],
+                             dut1_x_pid,
+                             "Unexpected DUT1 X publish session discovery ID")
 
-    dut1_y_pid = dut1.droid.wifiAwarePublish(id1,
-                                             autils.create_discovery_config(
-                                               Y_SERVICE_NAME, type_y[0]))
-    event = autils.wait_for_event(dut1, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
-    asserts.assert_equal(event["data"][aconsts.SESSION_CB_KEY_SESSION_ID],
-                         dut1_y_pid,
-                         "Unexpected DUT1 Y publish session discovery ID")
+        dut1_y_pid = dut1.droid.wifiAwarePublish(
+            id1, autils.create_discovery_config(Y_SERVICE_NAME, type_y[0]))
+        event = autils.wait_for_event(dut1,
+                                      aconsts.SESSION_CB_ON_PUBLISH_STARTED)
+        asserts.assert_equal(event["data"][aconsts.SESSION_CB_KEY_SESSION_ID],
+                             dut1_y_pid,
+                             "Unexpected DUT1 Y publish session discovery ID")
 
-    dut2_x_pid = dut2.droid.wifiAwarePublish(id2,
-                                             autils.create_discovery_config(
-                                                 X_SERVICE_NAME, type_x[0]))
-    event = autils.wait_for_event(dut2, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
-    asserts.assert_equal(event["data"][aconsts.SESSION_CB_KEY_SESSION_ID],
-                         dut2_x_pid,
-                         "Unexpected DUT2 X publish session discovery ID")
+        dut2_x_pid = dut2.droid.wifiAwarePublish(
+            id2, autils.create_discovery_config(X_SERVICE_NAME, type_x[0]))
+        event = autils.wait_for_event(dut2,
+                                      aconsts.SESSION_CB_ON_PUBLISH_STARTED)
+        asserts.assert_equal(event["data"][aconsts.SESSION_CB_KEY_SESSION_ID],
+                             dut2_x_pid,
+                             "Unexpected DUT2 X publish session discovery ID")
 
-    dut2_y_pid = dut2.droid.wifiAwarePublish(id2,
-                                             autils.create_discovery_config(
-                                                 Y_SERVICE_NAME, type_y[0]))
-    event = autils.wait_for_event(dut2, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
-    asserts.assert_equal(event["data"][aconsts.SESSION_CB_KEY_SESSION_ID],
-                         dut2_y_pid,
-                         "Unexpected DUT2 Y publish session discovery ID")
+        dut2_y_pid = dut2.droid.wifiAwarePublish(
+            id2, autils.create_discovery_config(Y_SERVICE_NAME, type_y[0]))
+        event = autils.wait_for_event(dut2,
+                                      aconsts.SESSION_CB_ON_PUBLISH_STARTED)
+        asserts.assert_equal(event["data"][aconsts.SESSION_CB_KEY_SESSION_ID],
+                             dut2_y_pid,
+                             "Unexpected DUT2 Y publish session discovery ID")
 
-    # DUT1: start subscribing for X
-    dut1_x_sid = dut1.droid.wifiAwareSubscribe(id1,
-                                               autils.create_discovery_config(
-                                                   X_SERVICE_NAME, type_x[1]))
-    autils.wait_for_event(dut1, aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED)
+        # DUT1: start subscribing for X
+        dut1_x_sid = dut1.droid.wifiAwareSubscribe(
+            id1, autils.create_discovery_config(X_SERVICE_NAME, type_x[1]))
+        autils.wait_for_event(dut1, aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED)
 
-    # DUT2: start subscribing for Y
-    dut2_y_sid = dut2.droid.wifiAwareSubscribe(id2,
-                                               autils.create_discovery_config(
-                                                   Y_SERVICE_NAME, type_y[1]))
-    autils.wait_for_event(dut2, aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED)
+        # DUT2: start subscribing for Y
+        dut2_y_sid = dut2.droid.wifiAwareSubscribe(
+            id2, autils.create_discovery_config(Y_SERVICE_NAME, type_y[1]))
+        autils.wait_for_event(dut2, aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED)
 
-    # DUT1 & DUT2: wait for service discovery
-    event = autils.wait_for_event(dut1,
-                                  aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
-    asserts.assert_equal(event["data"][aconsts.SESSION_CB_KEY_SESSION_ID],
-                         dut1_x_sid,
-                         "Unexpected DUT1 X subscribe session discovery ID")
-    dut1_peer_id_for_dut2_x = event["data"][aconsts.SESSION_CB_KEY_PEER_ID]
+        # DUT1 & DUT2: wait for service discovery
+        event = autils.wait_for_event(dut1,
+                                      aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
+        asserts.assert_equal(
+            event["data"][aconsts.SESSION_CB_KEY_SESSION_ID], dut1_x_sid,
+            "Unexpected DUT1 X subscribe session discovery ID")
+        dut1_peer_id_for_dut2_x = event["data"][aconsts.SESSION_CB_KEY_PEER_ID]
 
-    event = autils.wait_for_event(dut2,
-                                  aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
-    asserts.assert_equal(event["data"][aconsts.SESSION_CB_KEY_SESSION_ID],
-                         dut2_y_sid,
-                         "Unexpected DUT2 Y subscribe session discovery ID")
-    dut2_peer_id_for_dut1_y = event["data"][aconsts.SESSION_CB_KEY_PEER_ID]
+        event = autils.wait_for_event(dut2,
+                                      aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
+        asserts.assert_equal(
+            event["data"][aconsts.SESSION_CB_KEY_SESSION_ID], dut2_y_sid,
+            "Unexpected DUT2 Y subscribe session discovery ID")
+        dut2_peer_id_for_dut1_y = event["data"][aconsts.SESSION_CB_KEY_PEER_ID]
 
-    # DUT1.X send message to DUT2
-    x_msg = "Hello X on DUT2!"
-    dut1.droid.wifiAwareSendMessage(dut1_x_sid, dut1_peer_id_for_dut2_x,
-                                     self.get_next_msg_id(), x_msg,
-                                     self.msg_retx_count)
-    autils.wait_for_event(dut1, aconsts.SESSION_CB_ON_MESSAGE_SENT)
-    event = autils.wait_for_event(dut2, aconsts.SESSION_CB_ON_MESSAGE_RECEIVED)
-    asserts.assert_equal(event["data"][aconsts.SESSION_CB_KEY_SESSION_ID],
-                         dut2_x_pid,
-                        "Unexpected publish session ID on DUT2 for meesage "
-                        "received on service X")
-    asserts.assert_equal(
-        event["data"][aconsts.SESSION_CB_KEY_MESSAGE_AS_STRING], x_msg,
-        "Message on service X from DUT1 to DUT2 not received correctly")
+        # DUT1.X send message to DUT2
+        x_msg = "Hello X on DUT2!"
+        dut1.droid.wifiAwareSendMessage(dut1_x_sid, dut1_peer_id_for_dut2_x,
+                                        self.get_next_msg_id(), x_msg,
+                                        self.msg_retx_count)
+        autils.wait_for_event(dut1, aconsts.SESSION_CB_ON_MESSAGE_SENT)
+        event = autils.wait_for_event(dut2,
+                                      aconsts.SESSION_CB_ON_MESSAGE_RECEIVED)
+        asserts.assert_equal(
+            event["data"][aconsts.SESSION_CB_KEY_SESSION_ID], dut2_x_pid,
+            "Unexpected publish session ID on DUT2 for message "
+            "received on service X")
+        asserts.assert_equal(
+            event["data"][aconsts.SESSION_CB_KEY_MESSAGE_AS_STRING], x_msg,
+            "Message on service X from DUT1 to DUT2 not received correctly")
 
-    # DUT2.Y send message to DUT1
-    y_msg = "Hello Y on DUT1!"
-    dut2.droid.wifiAwareSendMessage(dut2_y_sid, dut2_peer_id_for_dut1_y,
-                                    self.get_next_msg_id(), y_msg,
-                                    self.msg_retx_count)
-    autils.wait_for_event(dut2, aconsts.SESSION_CB_ON_MESSAGE_SENT)
-    event = autils.wait_for_event(dut1, aconsts.SESSION_CB_ON_MESSAGE_RECEIVED)
-    asserts.assert_equal(event["data"][aconsts.SESSION_CB_KEY_SESSION_ID],
-                         dut1_y_pid,
-                         "Unexpected publish session ID on DUT1 for meesage "
-                         "received on service Y")
-    asserts.assert_equal(
-        event["data"][aconsts.SESSION_CB_KEY_MESSAGE_AS_STRING], y_msg,
-        "Message on service Y from DUT2 to DUT1 not received correctly")
+        # DUT2.Y send message to DUT1
+        y_msg = "Hello Y on DUT1!"
+        dut2.droid.wifiAwareSendMessage(dut2_y_sid, dut2_peer_id_for_dut1_y,
+                                        self.get_next_msg_id(), y_msg,
+                                        self.msg_retx_count)
+        autils.wait_for_event(dut2, aconsts.SESSION_CB_ON_MESSAGE_SENT)
+        event = autils.wait_for_event(dut1,
+                                      aconsts.SESSION_CB_ON_MESSAGE_RECEIVED)
+        asserts.assert_equal(
+            event["data"][aconsts.SESSION_CB_KEY_SESSION_ID], dut1_y_pid,
+            "Unexpected publish session ID on DUT1 for message "
+            "received on service Y")
+        asserts.assert_equal(
+            event["data"][aconsts.SESSION_CB_KEY_MESSAGE_AS_STRING], y_msg,
+            "Message on service Y from DUT2 to DUT1 not received correctly")
 
-  @test_tracker_info(uuid="eef80cf3-1fd2-4526-969b-6af2dce785d7")
-  def test_multiple_concurrent_services_both_unsolicited_passive(self):
-    """Validate multiple concurrent discovery sessions running on both devices.
+    @test_tracker_info(uuid="eef80cf3-1fd2-4526-969b-6af2dce785d7")
+    def test_multiple_concurrent_services_both_unsolicited_passive(self):
+        """Validate multiple concurrent discovery sessions running on both devices.
     - DUT1 & DUT2 running Publish for X
     - DUT1 & DUT2 running Publish for Y
     - DUT1 Subscribes for X
@@ -971,13 +990,19 @@
     Note: test requires that devices support 2 publish sessions concurrently.
     The test will be skipped if the devices are not capable.
     """
-    self.run_multiple_concurrent_services(
-      type_x=[aconsts.PUBLISH_TYPE_UNSOLICITED, aconsts.SUBSCRIBE_TYPE_PASSIVE],
-      type_y=[aconsts.PUBLISH_TYPE_UNSOLICITED, aconsts.SUBSCRIBE_TYPE_PASSIVE])
+        self.run_multiple_concurrent_services(
+            type_x=[
+                aconsts.PUBLISH_TYPE_UNSOLICITED,
+                aconsts.SUBSCRIBE_TYPE_PASSIVE
+            ],
+            type_y=[
+                aconsts.PUBLISH_TYPE_UNSOLICITED,
+                aconsts.SUBSCRIBE_TYPE_PASSIVE
+            ])
 
-  @test_tracker_info(uuid="46739f04-ab2b-4556-b1a4-9aa2774869b5")
-  def test_multiple_concurrent_services_both_solicited_active(self):
-    """Validate multiple concurrent discovery sessions running on both devices.
+    @test_tracker_info(uuid="46739f04-ab2b-4556-b1a4-9aa2774869b5")
+    def test_multiple_concurrent_services_both_solicited_active(self):
+        """Validate multiple concurrent discovery sessions running on both devices.
     - DUT1 & DUT2 running Publish for X
     - DUT1 & DUT2 running Publish for Y
     - DUT1 Subscribes for X
@@ -989,13 +1014,17 @@
     Note: test requires that devices support 2 publish sessions concurrently.
     The test will be skipped if the devices are not capable.
     """
-    self.run_multiple_concurrent_services(
-      type_x=[aconsts.PUBLISH_TYPE_SOLICITED, aconsts.SUBSCRIBE_TYPE_ACTIVE],
-      type_y=[aconsts.PUBLISH_TYPE_SOLICITED, aconsts.SUBSCRIBE_TYPE_ACTIVE])
+        self.run_multiple_concurrent_services(
+            type_x=[
+                aconsts.PUBLISH_TYPE_SOLICITED, aconsts.SUBSCRIBE_TYPE_ACTIVE
+            ],
+            type_y=[
+                aconsts.PUBLISH_TYPE_SOLICITED, aconsts.SUBSCRIBE_TYPE_ACTIVE
+            ])
 
-  @test_tracker_info(uuid="5f8f7fd2-4a0e-4cca-8cbb-6d54353f2baa")
-  def test_multiple_concurrent_services_mix_unsolicited_solicited(self):
-    """Validate multiple concurrent discovery sessions running on both devices.
+    @test_tracker_info(uuid="5f8f7fd2-4a0e-4cca-8cbb-6d54353f2baa")
+    def test_multiple_concurrent_services_mix_unsolicited_solicited(self):
+        """Validate multiple concurrent discovery sessions running on both devices.
     - DUT1 & DUT2 running Publish for X
     - DUT1 & DUT2 running Publish for Y
     - DUT1 Subscribes for X
@@ -1008,28 +1037,33 @@
     Note: test requires that devices support 2 publish sessions concurrently.
     The test will be skipped if the devices are not capable.
     """
-    self.run_multiple_concurrent_services(
-      type_x=[aconsts.PUBLISH_TYPE_UNSOLICITED, aconsts.SUBSCRIBE_TYPE_PASSIVE],
-      type_y=[aconsts.PUBLISH_TYPE_SOLICITED, aconsts.SUBSCRIBE_TYPE_ACTIVE])
+        self.run_multiple_concurrent_services(
+            type_x=[
+                aconsts.PUBLISH_TYPE_UNSOLICITED,
+                aconsts.SUBSCRIBE_TYPE_PASSIVE
+            ],
+            type_y=[
+                aconsts.PUBLISH_TYPE_SOLICITED, aconsts.SUBSCRIBE_TYPE_ACTIVE
+            ])
 
-  #########################################################
+    #########################################################
 
-  @test_tracker_info(uuid="908ec896-fc7a-4ee4-b633-a2f042b74448")
-  def test_upper_lower_service_name_equivalence(self):
-    """Validate that Service Name is case-insensitive. Publish a service name
+    @test_tracker_info(uuid="908ec896-fc7a-4ee4-b633-a2f042b74448")
+    def test_upper_lower_service_name_equivalence(self):
+        """Validate that Service Name is case-insensitive. Publish a service name
     with mixed case, subscribe to the same service name with alternative case
     and verify that discovery happens."""
-    p_dut = self.android_devices[0]
-    s_dut = self.android_devices[1]
+        p_dut = self.android_devices[0]
+        s_dut = self.android_devices[1]
 
-    pub_service_name = "GoogleAbCdEf"
-    sub_service_name = "GoogleaBcDeF"
+        pub_service_name = "GoogleAbCdEf"
+        sub_service_name = "GoogleaBcDeF"
 
-    autils.create_discovery_pair(p_dut, s_dut,
-                               p_config=autils.create_discovery_config(
-                                 pub_service_name,
-                                 aconsts.PUBLISH_TYPE_UNSOLICITED),
-                               s_config=autils.create_discovery_config(
-                                 sub_service_name,
-                                 aconsts.SUBSCRIBE_TYPE_PASSIVE),
-                               device_startup_offset=self.device_startup_offset)
+        autils.create_discovery_pair(
+            p_dut,
+            s_dut,
+            p_config=autils.create_discovery_config(
+                pub_service_name, aconsts.PUBLISH_TYPE_UNSOLICITED),
+            s_config=autils.create_discovery_config(
+                sub_service_name, aconsts.SUBSCRIBE_TYPE_PASSIVE),
+            device_startup_offset=self.device_startup_offset)
diff --git a/acts/tests/google/wifi/aware/functional/MacRandomTest.py b/acts/tests/google/wifi/aware/functional/MacRandomTest.py
index af1503b..0278d6d 100644
--- a/acts/tests/google/wifi/aware/functional/MacRandomTest.py
+++ b/acts/tests/google/wifi/aware/functional/MacRandomTest.py
@@ -25,115 +25,118 @@
 
 
 class MacRandomTest(AwareBaseTest):
-  """Set of tests for Wi-Fi Aware MAC address randomization of NMI (NAN
+    """Set of tests for Wi-Fi Aware MAC address randomization of NMI (NAN
   management interface) and NDI (NAN data interface)."""
 
-  NUM_ITERATIONS = 10
+    NUM_ITERATIONS = 10
 
-  # number of second to 'reasonably' wait to make sure that devices synchronize
-  # with each other - useful for OOB test cases, where the OOB discovery would
-  # take some time
-  WAIT_FOR_CLUSTER = 5
+    # number of second to 'reasonably' wait to make sure that devices synchronize
+    # with each other - useful for OOB test cases, where the OOB discovery would
+    # take some time
+    WAIT_FOR_CLUSTER = 5
 
-  def __init__(self, controllers):
-    AwareBaseTest.__init__(self, controllers)
+    def __init__(self, controllers):
+        AwareBaseTest.__init__(self, controllers)
 
-  def request_network(self, dut, ns):
-    """Request a Wi-Fi Aware network.
+    def request_network(self, dut, ns):
+        """Request a Wi-Fi Aware network.
 
     Args:
       dut: Device
       ns: Network specifier
     Returns: the request key
     """
-    network_req = {"TransportType": 5, "NetworkSpecifier": ns}
-    return dut.droid.connectivityRequestWifiAwareNetwork(network_req)
+        network_req = {"TransportType": 5, "NetworkSpecifier": ns}
+        return dut.droid.connectivityRequestWifiAwareNetwork(network_req)
 
-  ##########################################################################
+    ##########################################################################
 
-  @test_tracker_info(uuid="09964368-146a-48e4-9f33-6a319f9eeadc")
-  def test_nmi_ndi_randomization_on_enable(self):
-    """Validate randomization of the NMI (NAN management interface) and all NDIs
+    @test_tracker_info(uuid="09964368-146a-48e4-9f33-6a319f9eeadc")
+    def test_nmi_ndi_randomization_on_enable(self):
+        """Validate randomization of the NMI (NAN management interface) and all NDIs
     (NAN data-interface) on each enable/disable cycle"""
-    dut = self.android_devices[0]
+        dut = self.android_devices[0]
 
-    # re-enable randomization interval (since if disabled it may also disable
-    # the 'randomize on enable' feature).
-    autils.configure_mac_random_interval(dut, 1800)
+        # re-enable randomization interval (since if disabled it may also disable
+        # the 'randomize on enable' feature).
+        autils.configure_mac_random_interval(dut, 1800)
 
-    # DUT: attach and wait for confirmation & identity 10 times
-    mac_addresses = {}
-    for i in range(self.NUM_ITERATIONS):
-      id = dut.droid.wifiAwareAttach(True)
-      autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
-      ident_event = autils.wait_for_event(dut,
-                                          aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
+        # DUT: attach and wait for confirmation & identity 10 times
+        mac_addresses = {}
+        for i in range(self.NUM_ITERATIONS):
+            id = dut.droid.wifiAwareAttach(True)
+            autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
+            ident_event = autils.wait_for_event(
+                dut, aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
 
-      # process NMI
-      mac = ident_event["data"]["mac"]
-      dut.log.info("NMI=%s", mac)
-      if mac in mac_addresses:
-        mac_addresses[mac] = mac_addresses[mac] + 1
-      else:
-        mac_addresses[mac] = 1
+            # process NMI
+            mac = ident_event["data"]["mac"]
+            dut.log.info("NMI=%s", mac)
+            if mac in mac_addresses:
+                mac_addresses[mac] = mac_addresses[mac] + 1
+            else:
+                mac_addresses[mac] = 1
 
-      # process NDIs
-      time.sleep(5) # wait for NDI creation to complete
-      for j in range(dut.aware_capabilities[aconsts.CAP_MAX_NDI_INTERFACES]):
-        ndi_interface = "%s%d" % (aconsts.AWARE_NDI_PREFIX, j)
-        ndi_mac = autils.get_mac_addr(dut, ndi_interface)
-        dut.log.info("NDI %s=%s", ndi_interface, ndi_mac)
-        if ndi_mac in mac_addresses:
-          mac_addresses[ndi_mac] = mac_addresses[ndi_mac] + 1
-        else:
-          mac_addresses[ndi_mac] = 1
+            # process NDIs
+            time.sleep(5)  # wait for NDI creation to complete
+            for j in range(
+                    dut.aware_capabilities[aconsts.CAP_MAX_NDI_INTERFACES]):
+                ndi_interface = "%s%d" % (aconsts.AWARE_NDI_PREFIX, j)
+                ndi_mac = autils.get_mac_addr(dut, ndi_interface)
+                dut.log.info("NDI %s=%s", ndi_interface, ndi_mac)
+                if ndi_mac in mac_addresses:
+                    mac_addresses[ndi_mac] = mac_addresses[ndi_mac] + 1
+                else:
+                    mac_addresses[ndi_mac] = 1
 
-      dut.droid.wifiAwareDestroy(id)
+            dut.droid.wifiAwareDestroy(id)
 
-    # Test for uniqueness
-    for mac in mac_addresses.keys():
-      if mac_addresses[mac] != 1:
-        asserts.fail("MAC address %s repeated %d times (all=%s)" % (mac,
-                     mac_addresses[mac], mac_addresses))
+        # Test for uniqueness
+        for mac in mac_addresses.keys():
+            if mac_addresses[mac] != 1:
+                asserts.fail("MAC address %s repeated %d times (all=%s)" %
+                             (mac, mac_addresses[mac], mac_addresses))
 
-    # Verify that infra interface (e.g. wlan0) MAC address is not used for NMI
-    infra_mac = autils.get_wifi_mac_address(dut)
-    asserts.assert_false(
-        infra_mac in mac_addresses,
-        "Infrastructure MAC address (%s) is used for Aware NMI (all=%s)" %
-        (infra_mac, mac_addresses))
+        # Verify that infra interface (e.g. wlan0) MAC address is not used for NMI
+        infra_mac = autils.get_wifi_mac_address(dut)
+        asserts.assert_false(
+            infra_mac in mac_addresses,
+            "Infrastructure MAC address (%s) is used for Aware NMI (all=%s)" %
+            (infra_mac, mac_addresses))
 
-  @test_tracker_info(uuid="0fb0b5d8-d9cb-4e37-b9af-51811be5670d")
-  def test_nmi_randomization_on_interval(self):
-    """Validate randomization of the NMI (NAN management interface) on a set
+    @test_tracker_info(uuid="0fb0b5d8-d9cb-4e37-b9af-51811be5670d")
+    def test_nmi_randomization_on_interval(self):
+        """Validate randomization of the NMI (NAN management interface) on a set
     interval. Default value is 30 minutes - change to a small value to allow
     testing in real-time"""
-    RANDOM_INTERVAL = 120 # minimal value in current implementation
+        RANDOM_INTERVAL = 120  # minimal value in current implementation
 
-    dut = self.android_devices[0]
+        dut = self.android_devices[0]
 
-    # set randomization interval to 120 seconds
-    autils.configure_mac_random_interval(dut, RANDOM_INTERVAL)
+        # set randomization interval to 120 seconds
+        autils.configure_mac_random_interval(dut, RANDOM_INTERVAL)
 
-    # attach and wait for first identity
-    id = dut.droid.wifiAwareAttach(True)
-    autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
-    ident_event = autils.wait_for_event(dut,
-                                        aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
-    mac1 = ident_event["data"]["mac"]
+        # attach and wait for first identity
+        id = dut.droid.wifiAwareAttach(True)
+        autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
+        ident_event = autils.wait_for_event(
+            dut, aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
+        mac1 = ident_event["data"]["mac"]
 
-    # wait for second identity callback
-    # Note: exact randomization interval is not critical, just approximate,
-    # hence giving a few more seconds.
-    ident_event = autils.wait_for_event(dut,
-                                        aconsts.EVENT_CB_ON_IDENTITY_CHANGED,
-                                        timeout=RANDOM_INTERVAL + 5)
-    mac2 = ident_event["data"]["mac"]
+        # wait for second identity callback
+        # Note: exact randomization interval is not critical, just approximate,
+        # hence giving a few more seconds.
+        ident_event = autils.wait_for_event(
+            dut,
+            aconsts.EVENT_CB_ON_IDENTITY_CHANGED,
+            timeout=RANDOM_INTERVAL + 5)
+        mac2 = ident_event["data"]["mac"]
 
-    # validate MAC address is randomized
-    asserts.assert_false(
-        mac1 == mac2,
-        "Randomized MAC addresses (%s, %s) should be different" % (mac1, mac2))
+        # validate MAC address is randomized
+        asserts.assert_false(
+            mac1 == mac2,
+            "Randomized MAC addresses (%s, %s) should be different" % (mac1,
+                                                                       mac2))
 
-    # clean-up
-    dut.droid.wifiAwareDestroy(id)
+        # clean-up
+        dut.droid.wifiAwareDestroy(id)
diff --git a/acts/tests/google/wifi/aware/functional/MatchFilterTest.py b/acts/tests/google/wifi/aware/functional/MatchFilterTest.py
index 170b31b..bf88582 100644
--- a/acts/tests/google/wifi/aware/functional/MatchFilterTest.py
+++ b/acts/tests/google/wifi/aware/functional/MatchFilterTest.py
@@ -26,49 +26,43 @@
 
 
 class MatchFilterTest(AwareBaseTest):
-  """Set of tests for Wi-Fi Aware Discovery Match Filter behavior. These all
+    """Set of tests for Wi-Fi Aware Discovery Match Filter behavior. These all
   use examples from Appendix H of the Wi-Fi Aware standard."""
 
-  SERVICE_NAME = "GoogleTestServiceMFMFMF"
+    SERVICE_NAME = "GoogleTestServiceMFMFMF"
 
-  MF_NNNNN = bytes([0x0, 0x0, 0x0, 0x0, 0x0])
-  MF_12345 = bytes([0x1, 0x1, 0x1, 0x2, 0x1, 0x3, 0x1, 0x4, 0x1, 0x5])
-  MF_12145 = bytes([0x1, 0x1, 0x1, 0x2, 0x1, 0x1, 0x1, 0x4, 0x1, 0x5])
-  MF_1N3N5 = bytes([0x1, 0x1, 0x0, 0x1, 0x3, 0x0, 0x1, 0x5])
-  MF_N23N5 = bytes([0x0, 0x1, 0x2, 0x1, 0x3, 0x0, 0x1, 0x5])
-  MF_N2N4 = bytes([0x0, 0x1, 0x2, 0x0, 0x1, 0x4])
-  MF_1N3N = bytes([0x1, 0x1, 0x0, 0x1, 0x3, 0x0])
+    MF_NNNNN = bytes([0x0, 0x0, 0x0, 0x0, 0x0])
+    MF_12345 = bytes([0x1, 0x1, 0x1, 0x2, 0x1, 0x3, 0x1, 0x4, 0x1, 0x5])
+    MF_12145 = bytes([0x1, 0x1, 0x1, 0x2, 0x1, 0x1, 0x1, 0x4, 0x1, 0x5])
+    MF_1N3N5 = bytes([0x1, 0x1, 0x0, 0x1, 0x3, 0x0, 0x1, 0x5])
+    MF_N23N5 = bytes([0x0, 0x1, 0x2, 0x1, 0x3, 0x0, 0x1, 0x5])
+    MF_N2N4 = bytes([0x0, 0x1, 0x2, 0x0, 0x1, 0x4])
+    MF_1N3N = bytes([0x1, 0x1, 0x0, 0x1, 0x3, 0x0])
 
-  # Set of sample match filters from the spec. There is a set of matched
-  # filters:
-  # - Filter 1
-  # - Filter 2
-  # - Expected to match if the Subscriber uses Filter 1 as Tx and the Publisher
-  #   uses Filter 2 as Rx (implies Solicited/Active)
-  # - (the reverse) Expected to match if the Publisher uses Filter 1 as Tx and
-  #   the Subscriber uses Filter 2 as Rx (implies Unsolicited/Passive)
-  match_filters = [
-    [None, None, True, True],
-    [None, MF_NNNNN, True, True],
-    [MF_NNNNN, None, True, True],
-    [None, MF_12345, True, False],
-    [MF_12345, None, False, True],
-    [MF_NNNNN, MF_12345, True, True],
-    [MF_12345, MF_NNNNN, True, True],
-    [MF_12345, MF_12345, True, True],
-    [MF_12345, MF_12145, False, False],
-    [MF_1N3N5, MF_12345, True,True],
-    [MF_12345, MF_N23N5, True, True],
-    [MF_N2N4, MF_12345, True, False],
-    [MF_12345, MF_1N3N, False, True]
-  ]
+    # Set of sample match filters from the spec. There is a set of matched
+    # filters:
+    # - Filter 1
+    # - Filter 2
+    # - Expected to match if the Subscriber uses Filter 1 as Tx and the Publisher
+    #   uses Filter 2 as Rx (implies Solicited/Active)
+    # - (the reverse) Expected to match if the Publisher uses Filter 1 as Tx and
+    #   the Subscriber uses Filter 2 as Rx (implies Unsolicited/Passive)
+    match_filters = [[None, None, True, True], [None, MF_NNNNN, True, True], [
+        MF_NNNNN, None, True, True
+    ], [None, MF_12345, True, False], [MF_12345, None, False, True], [
+        MF_NNNNN, MF_12345, True, True
+    ], [MF_12345, MF_NNNNN, True, True], [MF_12345, MF_12345, True, True], [
+        MF_12345, MF_12145, False, False
+    ], [MF_1N3N5, MF_12345, True, True], [MF_12345, MF_N23N5, True, True],
+                     [MF_N2N4, MF_12345, True,
+                      False], [MF_12345, MF_1N3N, False, True]]
 
-  def __init__(self, controllers):
-    AwareBaseTest.__init__(self, controllers)
+    def __init__(self, controllers):
+        AwareBaseTest.__init__(self, controllers)
 
-  def run_discovery(self, p_dut, s_dut, p_mf, s_mf, do_unsolicited_passive,
-      expect_discovery):
-    """Creates a discovery session (publish and subscribe) with the specified
+    def run_discovery(self, p_dut, s_dut, p_mf, s_mf, do_unsolicited_passive,
+                      expect_discovery):
+        """Creates a discovery session (publish and subscribe) with the specified
     configuration.
 
     Args:
@@ -81,114 +75,119 @@
       expect_discovery: True if service should be discovered, False otherwise.
     Returns: True on success, False on failure (based on expect_discovery arg)
     """
-    # Encode the match filters
-    p_mf = base64.b64encode(p_mf).decode("utf-8") if p_mf is not None else None
-    s_mf = base64.b64encode(s_mf).decode("utf-8") if s_mf is not None else None
+        # Encode the match filters
+        p_mf = base64.b64encode(p_mf).decode(
+            "utf-8") if p_mf is not None else None
+        s_mf = base64.b64encode(s_mf).decode(
+            "utf-8") if s_mf is not None else None
 
-    # Publisher+Subscriber: attach and wait for confirmation
-    p_id = p_dut.droid.wifiAwareAttach()
-    autils.wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
-    time.sleep(self.device_startup_offset)
-    s_id = s_dut.droid.wifiAwareAttach()
-    autils.wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
+        # Publisher+Subscriber: attach and wait for confirmation
+        p_id = p_dut.droid.wifiAwareAttach()
+        autils.wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
+        time.sleep(self.device_startup_offset)
+        s_id = s_dut.droid.wifiAwareAttach()
+        autils.wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
 
-    # Publisher: start publish and wait for confirmation
-    p_dut.droid.wifiAwarePublish(p_id,
-                                 autils.create_discovery_config(
-                                     self.SERVICE_NAME,
-                                     d_type=aconsts.PUBLISH_TYPE_UNSOLICITED
-                                     if do_unsolicited_passive else
-                                     aconsts.PUBLISH_TYPE_SOLICITED,
-                                     match_filter=p_mf))
-    autils.wait_for_event(p_dut, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
+        # Publisher: start publish and wait for confirmation
+        p_dut.droid.wifiAwarePublish(
+            p_id,
+            autils.create_discovery_config(
+                self.SERVICE_NAME,
+                d_type=aconsts.PUBLISH_TYPE_UNSOLICITED
+                if do_unsolicited_passive else aconsts.PUBLISH_TYPE_SOLICITED,
+                match_filter=p_mf))
+        autils.wait_for_event(p_dut, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
 
-    # Subscriber: start subscribe and wait for confirmation
-    s_dut.droid.wifiAwareSubscribe(s_id,
-                                   autils.create_discovery_config(
-                                       self.SERVICE_NAME,
-                                       d_type=aconsts.SUBSCRIBE_TYPE_PASSIVE
-                                       if do_unsolicited_passive else
-                                       aconsts.SUBSCRIBE_TYPE_ACTIVE,
-                                       match_filter=s_mf))
-    autils.wait_for_event(s_dut, aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED)
+        # Subscriber: start subscribe and wait for confirmation
+        s_dut.droid.wifiAwareSubscribe(
+            s_id,
+            autils.create_discovery_config(
+                self.SERVICE_NAME,
+                d_type=aconsts.SUBSCRIBE_TYPE_PASSIVE
+                if do_unsolicited_passive else aconsts.SUBSCRIBE_TYPE_ACTIVE,
+                match_filter=s_mf))
+        autils.wait_for_event(s_dut, aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED)
 
-    # Subscriber: wait or fail on service discovery
-    event = None
-    try:
-      event = s_dut.ed.pop_event(aconsts.SESSION_CB_ON_SERVICE_DISCOVERED,
-                                 autils.EVENT_TIMEOUT)
-      s_dut.log.info("[Subscriber] SESSION_CB_ON_SERVICE_DISCOVERED: %s", event)
-    except queue.Empty:
-      s_dut.log.info("[Subscriber] No SESSION_CB_ON_SERVICE_DISCOVERED")
+        # Subscriber: wait or fail on service discovery
+        event = None
+        try:
+            event = s_dut.ed.pop_event(
+                aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, autils.EVENT_TIMEOUT)
+            s_dut.log.info("[Subscriber] SESSION_CB_ON_SERVICE_DISCOVERED: %s",
+                           event)
+        except queue.Empty:
+            s_dut.log.info("[Subscriber] No SESSION_CB_ON_SERVICE_DISCOVERED")
 
-    # clean-up
-    p_dut.droid.wifiAwareDestroy(p_id)
-    s_dut.droid.wifiAwareDestroy(s_id)
+        # clean-up
+        p_dut.droid.wifiAwareDestroy(p_id)
+        s_dut.droid.wifiAwareDestroy(s_id)
 
-    if expect_discovery:
-      return event is not None
-    else:
-      return event is None
+        if expect_discovery:
+            return event is not None
+        else:
+            return event is None
 
-  def run_match_filters_per_spec(self, do_unsolicited_passive):
-    """Validate all the match filter combinations in the Wi-Fi Aware spec,
+    def run_match_filters_per_spec(self, do_unsolicited_passive):
+        """Validate all the match filter combinations in the Wi-Fi Aware spec,
     Appendix H.
 
     Args:
       do_unsolicited_passive: True to run the Unsolicited/Passive tests, False
                               to run the Solicited/Active tests.
     """
-    p_dut = self.android_devices[0]
-    p_dut.pretty_name = "Publisher"
-    s_dut = self.android_devices[1]
-    s_dut.pretty_name = "Subscriber"
+        p_dut = self.android_devices[0]
+        p_dut.pretty_name = "Publisher"
+        s_dut = self.android_devices[1]
+        s_dut.pretty_name = "Subscriber"
 
-    fails = []
-    for i in range(len(self.match_filters)):
-      test_info = self.match_filters[i]
-      if do_unsolicited_passive:
-        pub_type = "Unsolicited"
-        sub_type = "Passive"
-        pub_mf = test_info[0]
-        sub_mf = test_info[1]
-        expect_discovery = test_info[3]
-      else:
-        pub_type = "Solicited"
-        sub_type = "Active"
-        pub_mf = test_info[1]
-        sub_mf = test_info[0]
-        expect_discovery = test_info[2]
+        fails = []
+        for i in range(len(self.match_filters)):
+            test_info = self.match_filters[i]
+            if do_unsolicited_passive:
+                pub_type = "Unsolicited"
+                sub_type = "Passive"
+                pub_mf = test_info[0]
+                sub_mf = test_info[1]
+                expect_discovery = test_info[3]
+            else:
+                pub_type = "Solicited"
+                sub_type = "Active"
+                pub_mf = test_info[1]
+                sub_mf = test_info[0]
+                expect_discovery = test_info[2]
 
-      self.log.info("Test #%d: %s Pub MF=%s, %s Sub MF=%s: Discovery %s", i,
-                    pub_type, pub_mf, sub_type, sub_mf, "EXPECTED"
-                    if test_info[2] else "UNEXPECTED")
-      result = self.run_discovery(
-          p_dut,
-          s_dut,
-          p_mf=pub_mf,
-          s_mf=sub_mf,
-          do_unsolicited_passive=do_unsolicited_passive,
-          expect_discovery=expect_discovery)
-      self.log.info("Test #%d %s Pub/%s Sub %s", i, pub_type, sub_type, "PASS"
-                    if result else "FAIL")
-      if not result:
-        fails.append(i)
+            self.log.info("Test #%d: %s Pub MF=%s, %s Sub MF=%s: Discovery %s",
+                          i, pub_type, pub_mf, sub_type, sub_mf, "EXPECTED"
+                          if test_info[2] else "UNEXPECTED")
+            result = self.run_discovery(
+                p_dut,
+                s_dut,
+                p_mf=pub_mf,
+                s_mf=sub_mf,
+                do_unsolicited_passive=do_unsolicited_passive,
+                expect_discovery=expect_discovery)
+            self.log.info("Test #%d %s Pub/%s Sub %s", i, pub_type, sub_type,
+                          "PASS" if result else "FAIL")
+            if not result:
+                fails.append(i)
 
-    asserts.assert_true(
-        len(fails) == 0, "Some match filter tests are failing", extras=fails)
+        asserts.assert_true(
+            len(fails) == 0,
+            "Some match filter tests are failing",
+            extras={"data": fails})
 
-  ###############################################################
+    ###############################################################
 
-  @test_tracker_info(uuid="bd734f8c-895a-4cf9-820f-ec5060517fe9")
-  def test_match_filters_per_spec_unsolicited_passive(self):
-    """Validate all the match filter combinations in the Wi-Fi Aware spec,
+    @test_tracker_info(uuid="bd734f8c-895a-4cf9-820f-ec5060517fe9")
+    def test_match_filters_per_spec_unsolicited_passive(self):
+        """Validate all the match filter combinations in the Wi-Fi Aware spec,
     Appendix H for Unsolicited Publish (tx filter) Passive Subscribe (rx
     filter)"""
-    self.run_match_filters_per_spec(do_unsolicited_passive=True)
+        self.run_match_filters_per_spec(do_unsolicited_passive=True)
 
-  @test_tracker_info(uuid="6560124d-69e5-49ff-a7e5-3cb305983723")
-  def test_match_filters_per_spec_solicited_active(self):
-    """Validate all the match filter combinations in the Wi-Fi Aware spec,
+    @test_tracker_info(uuid="6560124d-69e5-49ff-a7e5-3cb305983723")
+    def test_match_filters_per_spec_solicited_active(self):
+        """Validate all the match filter combinations in the Wi-Fi Aware spec,
     Appendix H for Solicited Publish (rx filter) Active Subscribe (tx
     filter)"""
-    self.run_match_filters_per_spec(do_unsolicited_passive=False)
+        self.run_match_filters_per_spec(do_unsolicited_passive=False)
diff --git a/acts/tests/google/wifi/aware/functional/MessageTest.py b/acts/tests/google/wifi/aware/functional/MessageTest.py
index 194ed6d..7fb3e8e 100644
--- a/acts/tests/google/wifi/aware/functional/MessageTest.py
+++ b/acts/tests/google/wifi/aware/functional/MessageTest.py
@@ -25,21 +25,21 @@
 
 
 class MessageTest(AwareBaseTest):
-  """Set of tests for Wi-Fi Aware L2 (layer 2) message exchanges."""
+    """Set of tests for Wi-Fi Aware L2 (layer 2) message exchanges."""
 
-  # configuration parameters used by tests
-  PAYLOAD_SIZE_MIN = 0
-  PAYLOAD_SIZE_TYPICAL = 1
-  PAYLOAD_SIZE_MAX = 2
+    # configuration parameters used by tests
+    PAYLOAD_SIZE_MIN = 0
+    PAYLOAD_SIZE_TYPICAL = 1
+    PAYLOAD_SIZE_MAX = 2
 
-  NUM_MSGS_NO_QUEUE = 10
-  NUM_MSGS_QUEUE_DEPTH_MULT = 2  # number of messages = mult * queue depth
+    NUM_MSGS_NO_QUEUE = 10
+    NUM_MSGS_QUEUE_DEPTH_MULT = 2  # number of messages = mult * queue depth
 
-  def __init__(self, controllers):
-    AwareBaseTest.__init__(self, controllers)
+    def __init__(self, controllers):
+        AwareBaseTest.__init__(self, controllers)
 
-  def create_msg(self, caps, payload_size, id):
-    """Creates a message string of the specified size containing the input id.
+    def create_msg(self, caps, payload_size, id):
+        """Creates a message string of the specified size containing the input id.
 
     Args:
       caps: Device capabilities.
@@ -50,17 +50,17 @@
 
     Returns: A string of the requested size, optionally containing the id.
     """
-    if payload_size == self.PAYLOAD_SIZE_MIN:
-      # arbitrarily return a None or an empty string (equivalent messages)
-      return None if id % 2 == 0 else ""
-    elif payload_size == self.PAYLOAD_SIZE_TYPICAL:
-      return "*** ID=%d ***" % id + string.ascii_uppercase
-    else:  # PAYLOAD_SIZE_MAX
-      return "*** ID=%4d ***" % id + "M" * (
-          caps[aconsts.CAP_MAX_SERVICE_SPECIFIC_INFO_LEN] - 15)
+        if payload_size == self.PAYLOAD_SIZE_MIN:
+            # arbitrarily return a None or an empty string (equivalent messages)
+            return None if id % 2 == 0 else ""
+        elif payload_size == self.PAYLOAD_SIZE_TYPICAL:
+            return "*** ID=%d ***" % id + string.ascii_uppercase
+        else:  # PAYLOAD_SIZE_MAX
+            return "*** ID=%4d ***" % id + "M" * (
+                caps[aconsts.CAP_MAX_SERVICE_SPECIFIC_INFO_LEN] - 15)
 
-  def create_config(self, is_publish, extra_diff=None):
-    """Create a base configuration based on input parameters.
+    def create_config(self, is_publish, extra_diff=None):
+        """Create a base configuration based on input parameters.
 
     Args:
       is_publish: True for publish, False for subscribe sessions.
@@ -70,19 +70,21 @@
     Returns:
       publish discovery configuration object.
     """
-    config = {}
-    if is_publish:
-      config[aconsts.
-             DISCOVERY_KEY_DISCOVERY_TYPE] = aconsts.PUBLISH_TYPE_UNSOLICITED
-    else:
-      config[
-          aconsts.DISCOVERY_KEY_DISCOVERY_TYPE] = aconsts.SUBSCRIBE_TYPE_PASSIVE
-    config[aconsts.DISCOVERY_KEY_SERVICE_NAME] = "GoogleTestServiceX" + (
-        extra_diff if extra_diff is not None else "")
-    return config
+        config = {}
+        if is_publish:
+            config[
+                aconsts.
+                DISCOVERY_KEY_DISCOVERY_TYPE] = aconsts.PUBLISH_TYPE_UNSOLICITED
+        else:
+            config[
+                aconsts.
+                DISCOVERY_KEY_DISCOVERY_TYPE] = aconsts.SUBSCRIBE_TYPE_PASSIVE
+        config[aconsts.DISCOVERY_KEY_SERVICE_NAME] = "GoogleTestServiceX" + (
+            extra_diff if extra_diff is not None else "")
+        return config
 
-  def prep_message_exchange(self, extra_diff=None):
-    """Creates a discovery session (publish and subscribe), and waits for
+    def prep_message_exchange(self, extra_diff=None):
+        """Creates a discovery session (publish and subscribe), and waits for
     service discovery - at that point the sessions are ready for message
     exchange.
 
@@ -90,115 +92,126 @@
       extra_diff: String to add to service name: allows differentiating
                   discovery sessions.
     """
-    p_dut = self.android_devices[0]
-    p_dut.pretty_name = "Publisher"
-    s_dut = self.android_devices[1]
-    s_dut.pretty_name = "Subscriber"
+        p_dut = self.android_devices[0]
+        p_dut.pretty_name = "Publisher"
+        s_dut = self.android_devices[1]
+        s_dut.pretty_name = "Subscriber"
 
-    # if differentiating (multiple) sessions then should decorate events with id
-    use_id = extra_diff is not None
+        # if differentiating (multiple) sessions then should decorate events with id
+        use_id = extra_diff is not None
 
-    # Publisher+Subscriber: attach and wait for confirmation
-    p_id = p_dut.droid.wifiAwareAttach(False, None, use_id)
-    autils.wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED
-                          if not use_id else autils.decorate_event(
-                              aconsts.EVENT_CB_ON_ATTACHED, p_id))
-    time.sleep(self.device_startup_offset)
-    s_id = s_dut.droid.wifiAwareAttach(False, None, use_id)
-    autils.wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED
-                          if not use_id else autils.decorate_event(
-                              aconsts.EVENT_CB_ON_ATTACHED, s_id))
+        # Publisher+Subscriber: attach and wait for confirmation
+        p_id = p_dut.droid.wifiAwareAttach(False, None, use_id)
+        autils.wait_for_event(
+            p_dut, aconsts.EVENT_CB_ON_ATTACHED
+            if not use_id else autils.decorate_event(
+                aconsts.EVENT_CB_ON_ATTACHED, p_id))
+        time.sleep(self.device_startup_offset)
+        s_id = s_dut.droid.wifiAwareAttach(False, None, use_id)
+        autils.wait_for_event(
+            s_dut, aconsts.EVENT_CB_ON_ATTACHED
+            if not use_id else autils.decorate_event(
+                aconsts.EVENT_CB_ON_ATTACHED, s_id))
 
-    # Publisher: start publish and wait for confirmation
-    p_disc_id = p_dut.droid.wifiAwarePublish(p_id,
-                                             self.create_config(
-                                                 True, extra_diff=extra_diff),
-                                             use_id)
-    autils.wait_for_event(p_dut, aconsts.SESSION_CB_ON_PUBLISH_STARTED
-                          if not use_id else autils.decorate_event(
-                              aconsts.SESSION_CB_ON_PUBLISH_STARTED, p_disc_id))
+        # Publisher: start publish and wait for confirmation
+        p_disc_id = p_dut.droid.wifiAwarePublish(
+            p_id, self.create_config(True, extra_diff=extra_diff), use_id)
+        autils.wait_for_event(
+            p_dut, aconsts.SESSION_CB_ON_PUBLISH_STARTED
+            if not use_id else autils.decorate_event(
+                aconsts.SESSION_CB_ON_PUBLISH_STARTED, p_disc_id))
 
-    # Subscriber: start subscribe and wait for confirmation
-    s_disc_id = s_dut.droid.wifiAwareSubscribe(
-        s_id, self.create_config(False, extra_diff=extra_diff), use_id)
-    autils.wait_for_event(s_dut, aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED
-                          if not use_id else autils.decorate_event(
-                              aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED,
-                              s_disc_id))
+        # Subscriber: start subscribe and wait for confirmation
+        s_disc_id = s_dut.droid.wifiAwareSubscribe(
+            s_id, self.create_config(False, extra_diff=extra_diff), use_id)
+        autils.wait_for_event(
+            s_dut, aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED
+            if not use_id else autils.decorate_event(
+                aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, s_disc_id))
 
-    # Subscriber: wait for service discovery
-    discovery_event = autils.wait_for_event(
-        s_dut, aconsts.SESSION_CB_ON_SERVICE_DISCOVERED
-        if not use_id else autils.decorate_event(
-            aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, s_disc_id))
-    peer_id_on_sub = discovery_event["data"][aconsts.SESSION_CB_KEY_PEER_ID]
+        # Subscriber: wait for service discovery
+        discovery_event = autils.wait_for_event(
+            s_dut, aconsts.SESSION_CB_ON_SERVICE_DISCOVERED
+            if not use_id else autils.decorate_event(
+                aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, s_disc_id))
+        peer_id_on_sub = discovery_event["data"][
+            aconsts.SESSION_CB_KEY_PEER_ID]
 
-    return {
-        "p_dut": p_dut,
-        "s_dut": s_dut,
-        "p_id": p_id,
-        "s_id": s_id,
-        "p_disc_id": p_disc_id,
-        "s_disc_id": s_disc_id,
-        "peer_id_on_sub": peer_id_on_sub
-    }
+        return {
+            "p_dut": p_dut,
+            "s_dut": s_dut,
+            "p_id": p_id,
+            "s_id": s_id,
+            "p_disc_id": p_disc_id,
+            "s_disc_id": s_disc_id,
+            "peer_id_on_sub": peer_id_on_sub
+        }
 
-  def run_message_no_queue(self, payload_size):
-    """Validate L2 message exchange between publisher & subscriber with no
+    def run_message_no_queue(self, payload_size):
+        """Validate L2 message exchange between publisher & subscriber with no
     queueing - i.e. wait for an ACK on each message before sending the next
     message.
 
     Args:
       payload_size: min, typical, or max (PAYLOAD_SIZE_xx).
     """
-    discovery_info = self.prep_message_exchange()
-    p_dut = discovery_info["p_dut"]
-    s_dut = discovery_info["s_dut"]
-    p_disc_id = discovery_info["p_disc_id"]
-    s_disc_id = discovery_info["s_disc_id"]
-    peer_id_on_sub = discovery_info["peer_id_on_sub"]
+        discovery_info = self.prep_message_exchange()
+        p_dut = discovery_info["p_dut"]
+        s_dut = discovery_info["s_dut"]
+        p_disc_id = discovery_info["p_disc_id"]
+        s_disc_id = discovery_info["s_disc_id"]
+        peer_id_on_sub = discovery_info["peer_id_on_sub"]
 
-    for i in range(self.NUM_MSGS_NO_QUEUE):
-      msg = self.create_msg(s_dut.aware_capabilities, payload_size, i)
-      msg_id = self.get_next_msg_id()
-      s_dut.droid.wifiAwareSendMessage(s_disc_id, peer_id_on_sub, msg_id, msg,
-                                       0)
-      tx_event = autils.wait_for_event(s_dut,
-                                       aconsts.SESSION_CB_ON_MESSAGE_SENT)
-      rx_event = autils.wait_for_event(p_dut,
-                                       aconsts.SESSION_CB_ON_MESSAGE_RECEIVED)
-      asserts.assert_equal(msg_id,
-                           tx_event["data"][aconsts.SESSION_CB_KEY_MESSAGE_ID],
-                           "Subscriber -> Publisher message ID corrupted")
-      autils.assert_equal_strings(
-          msg, rx_event["data"][aconsts.SESSION_CB_KEY_MESSAGE_AS_STRING],
-          "Subscriber -> Publisher message %d corrupted" % i)
+        for i in range(self.NUM_MSGS_NO_QUEUE):
+            msg = self.create_msg(s_dut.aware_capabilities, payload_size, i)
+            msg_id = self.get_next_msg_id()
+            s_dut.droid.wifiAwareSendMessage(s_disc_id, peer_id_on_sub, msg_id,
+                                             msg, 0)
+            tx_event = autils.wait_for_event(
+                s_dut, aconsts.SESSION_CB_ON_MESSAGE_SENT)
+            rx_event = autils.wait_for_event(
+                p_dut, aconsts.SESSION_CB_ON_MESSAGE_RECEIVED)
+            asserts.assert_equal(
+                msg_id, tx_event["data"][aconsts.SESSION_CB_KEY_MESSAGE_ID],
+                "Subscriber -> Publisher message ID corrupted")
+            autils.assert_equal_strings(
+                msg,
+                rx_event["data"][aconsts.SESSION_CB_KEY_MESSAGE_AS_STRING],
+                "Subscriber -> Publisher message %d corrupted" % i)
 
-    peer_id_on_pub = rx_event["data"][aconsts.SESSION_CB_KEY_PEER_ID]
-    for i in range(self.NUM_MSGS_NO_QUEUE):
-      msg = self.create_msg(s_dut.aware_capabilities, payload_size, 1000 + i)
-      msg_id = self.get_next_msg_id()
-      p_dut.droid.wifiAwareSendMessage(p_disc_id, peer_id_on_pub, msg_id, msg,
-                                       0)
-      tx_event = autils.wait_for_event(p_dut,
-                                       aconsts.SESSION_CB_ON_MESSAGE_SENT)
-      rx_event = autils.wait_for_event(s_dut,
-                                       aconsts.SESSION_CB_ON_MESSAGE_RECEIVED)
-      asserts.assert_equal(msg_id,
-                           tx_event["data"][aconsts.SESSION_CB_KEY_MESSAGE_ID],
-                           "Publisher -> Subscriber message ID corrupted")
-      autils.assert_equal_strings(
-          msg, rx_event["data"][aconsts.SESSION_CB_KEY_MESSAGE_AS_STRING],
-          "Publisher -> Subscriber message %d corrupted" % i)
+        peer_id_on_pub = rx_event["data"][aconsts.SESSION_CB_KEY_PEER_ID]
+        for i in range(self.NUM_MSGS_NO_QUEUE):
+            msg = self.create_msg(s_dut.aware_capabilities, payload_size,
+                                  1000 + i)
+            msg_id = self.get_next_msg_id()
+            p_dut.droid.wifiAwareSendMessage(p_disc_id, peer_id_on_pub, msg_id,
+                                             msg, 0)
+            tx_event = autils.wait_for_event(
+                p_dut, aconsts.SESSION_CB_ON_MESSAGE_SENT)
+            rx_event = autils.wait_for_event(
+                s_dut, aconsts.SESSION_CB_ON_MESSAGE_RECEIVED)
+            asserts.assert_equal(
+                msg_id, tx_event["data"][aconsts.SESSION_CB_KEY_MESSAGE_ID],
+                "Publisher -> Subscriber message ID corrupted")
+            autils.assert_equal_strings(
+                msg,
+                rx_event["data"][aconsts.SESSION_CB_KEY_MESSAGE_AS_STRING],
+                "Publisher -> Subscriber message %d corrupted" % i)
 
-    # verify there are no more events
-    time.sleep(autils.EVENT_TIMEOUT)
-    autils.verify_no_more_events(p_dut, timeout=0)
-    autils.verify_no_more_events(s_dut, timeout=0)
+        # verify there are no more events
+        time.sleep(autils.EVENT_TIMEOUT)
+        autils.verify_no_more_events(p_dut, timeout=0)
+        autils.verify_no_more_events(s_dut, timeout=0)
 
-  def wait_for_messages(self, tx_msgs, tx_msg_ids, tx_disc_id, rx_disc_id,
-                        tx_dut, rx_dut, are_msgs_empty=False):
-    """Validate that all expected messages are transmitted correctly and
+    def wait_for_messages(self,
+                          tx_msgs,
+                          tx_msg_ids,
+                          tx_disc_id,
+                          rx_disc_id,
+                          tx_dut,
+                          rx_dut,
+                          are_msgs_empty=False):
+        """Validate that all expected messages are transmitted correctly and
     received as expected. Method is called after the messages are sent into
     the transmission queue.
 
@@ -216,104 +229,105 @@
 
     Returns: the peer ID from any of the received messages
     """
-    # peer id on receiver
-    peer_id_on_rx = None
+        # peer id on receiver
+        peer_id_on_rx = None
 
-    # wait for all messages to be transmitted
-    still_to_be_tx = len(tx_msg_ids)
-    while still_to_be_tx != 0:
-      tx_event = autils.wait_for_event(
-          tx_dut, aconsts.SESSION_CB_ON_MESSAGE_SENT
-          if tx_disc_id is None else autils.decorate_event(
-              aconsts.SESSION_CB_ON_MESSAGE_SENT, tx_disc_id))
-      tx_msg_id = tx_event["data"][aconsts.SESSION_CB_KEY_MESSAGE_ID]
-      tx_msg_ids[tx_msg_id] = tx_msg_ids[tx_msg_id] + 1
-      if tx_msg_ids[tx_msg_id] == 1:
-        still_to_be_tx = still_to_be_tx - 1
+        # wait for all messages to be transmitted
+        still_to_be_tx = len(tx_msg_ids)
+        while still_to_be_tx != 0:
+            tx_event = autils.wait_for_event(
+                tx_dut, aconsts.SESSION_CB_ON_MESSAGE_SENT
+                if tx_disc_id is None else autils.decorate_event(
+                    aconsts.SESSION_CB_ON_MESSAGE_SENT, tx_disc_id))
+            tx_msg_id = tx_event["data"][aconsts.SESSION_CB_KEY_MESSAGE_ID]
+            tx_msg_ids[tx_msg_id] = tx_msg_ids[tx_msg_id] + 1
+            if tx_msg_ids[tx_msg_id] == 1:
+                still_to_be_tx = still_to_be_tx - 1
 
-    # check for any duplicate transmit notifications
-    asserts.assert_equal(
-        len(tx_msg_ids),
-        sum(tx_msg_ids.values()),
-        "Duplicate transmit message IDs: %s" % tx_msg_ids)
+        # check for any duplicate transmit notifications
+        asserts.assert_equal(
+            len(tx_msg_ids), sum(tx_msg_ids.values()),
+            "Duplicate transmit message IDs: %s" % tx_msg_ids)
 
-    # wait for all messages to be received
-    still_to_be_rx = len(tx_msg_ids)
-    while still_to_be_rx != 0:
-      rx_event = autils.wait_for_event(
-          rx_dut, aconsts.SESSION_CB_ON_MESSAGE_RECEIVED
-          if rx_disc_id is None else autils.decorate_event(
-              aconsts.SESSION_CB_ON_MESSAGE_RECEIVED, rx_disc_id))
-      peer_id_on_rx = rx_event["data"][aconsts.SESSION_CB_KEY_PEER_ID]
-      if are_msgs_empty:
-        still_to_be_rx = still_to_be_rx - 1
-      else:
-        rx_msg = rx_event["data"][aconsts.SESSION_CB_KEY_MESSAGE_AS_STRING]
-        asserts.assert_true(
-            rx_msg in tx_msgs,
-            "Received a message we did not send!? -- '%s'" % rx_msg)
-        tx_msgs[rx_msg] = tx_msgs[rx_msg] + 1
-        if tx_msgs[rx_msg] == 1:
-          still_to_be_rx = still_to_be_rx - 1
+        # wait for all messages to be received
+        still_to_be_rx = len(tx_msg_ids)
+        while still_to_be_rx != 0:
+            rx_event = autils.wait_for_event(
+                rx_dut, aconsts.SESSION_CB_ON_MESSAGE_RECEIVED
+                if rx_disc_id is None else autils.decorate_event(
+                    aconsts.SESSION_CB_ON_MESSAGE_RECEIVED, rx_disc_id))
+            peer_id_on_rx = rx_event["data"][aconsts.SESSION_CB_KEY_PEER_ID]
+            if are_msgs_empty:
+                still_to_be_rx = still_to_be_rx - 1
+            else:
+                rx_msg = rx_event["data"][
+                    aconsts.SESSION_CB_KEY_MESSAGE_AS_STRING]
+                asserts.assert_true(
+                    rx_msg in tx_msgs,
+                    "Received a message we did not send!? -- '%s'" % rx_msg)
+                tx_msgs[rx_msg] = tx_msgs[rx_msg] + 1
+                if tx_msgs[rx_msg] == 1:
+                    still_to_be_rx = still_to_be_rx - 1
 
-    # check for any duplicate received messages
-    if not are_msgs_empty:
-      asserts.assert_equal(
-          len(tx_msgs),
-          sum(tx_msgs.values()), "Duplicate transmit messages: %s" % tx_msgs)
+        # check for any duplicate received messages
+        if not are_msgs_empty:
+            asserts.assert_equal(
+                len(tx_msgs), sum(tx_msgs.values()),
+                "Duplicate transmit messages: %s" % tx_msgs)
 
-    return peer_id_on_rx
+        return peer_id_on_rx
 
-  def run_message_with_queue(self, payload_size):
-    """Validate L2 message exchange between publisher & subscriber with
+    def run_message_with_queue(self, payload_size):
+        """Validate L2 message exchange between publisher & subscriber with
     queueing - i.e. transmit all messages and then wait for ACKs.
 
     Args:
       payload_size: min, typical, or max (PAYLOAD_SIZE_xx).
     """
-    discovery_info = self.prep_message_exchange()
-    p_dut = discovery_info["p_dut"]
-    s_dut = discovery_info["s_dut"]
-    p_disc_id = discovery_info["p_disc_id"]
-    s_disc_id = discovery_info["s_disc_id"]
-    peer_id_on_sub = discovery_info["peer_id_on_sub"]
+        discovery_info = self.prep_message_exchange()
+        p_dut = discovery_info["p_dut"]
+        s_dut = discovery_info["s_dut"]
+        p_disc_id = discovery_info["p_disc_id"]
+        s_disc_id = discovery_info["s_disc_id"]
+        peer_id_on_sub = discovery_info["peer_id_on_sub"]
 
-    msgs = {}
-    msg_ids = {}
-    for i in range(
-        self.NUM_MSGS_QUEUE_DEPTH_MULT *
-        s_dut.aware_capabilities[aconsts.CAP_MAX_QUEUED_TRANSMIT_MESSAGES]):
-      msg = self.create_msg(s_dut.aware_capabilities, payload_size, i)
-      msg_id = self.get_next_msg_id()
-      msgs[msg] = 0
-      msg_ids[msg_id] = 0
-      s_dut.droid.wifiAwareSendMessage(s_disc_id, peer_id_on_sub, msg_id, msg,
-                                       0)
-    peer_id_on_pub = self.wait_for_messages(
-        msgs, msg_ids, None, None, s_dut, p_dut,
-        payload_size == self.PAYLOAD_SIZE_MIN)
+        msgs = {}
+        msg_ids = {}
+        for i in range(
+                self.NUM_MSGS_QUEUE_DEPTH_MULT * s_dut.
+                aware_capabilities[aconsts.CAP_MAX_QUEUED_TRANSMIT_MESSAGES]):
+            msg = self.create_msg(s_dut.aware_capabilities, payload_size, i)
+            msg_id = self.get_next_msg_id()
+            msgs[msg] = 0
+            msg_ids[msg_id] = 0
+            s_dut.droid.wifiAwareSendMessage(s_disc_id, peer_id_on_sub, msg_id,
+                                             msg, 0)
+        peer_id_on_pub = self.wait_for_messages(
+            msgs, msg_ids, None, None, s_dut, p_dut,
+            payload_size == self.PAYLOAD_SIZE_MIN)
 
-    msgs = {}
-    msg_ids = {}
-    for i in range(
-            self.NUM_MSGS_QUEUE_DEPTH_MULT *
-            p_dut.aware_capabilities[aconsts.CAP_MAX_QUEUED_TRANSMIT_MESSAGES]):
-      msg = self.create_msg(p_dut.aware_capabilities, payload_size, 1000 + i)
-      msg_id = self.get_next_msg_id()
-      msgs[msg] = 0
-      msg_ids[msg_id] = 0
-      p_dut.droid.wifiAwareSendMessage(p_disc_id, peer_id_on_pub, msg_id, msg,
-                                       0)
-    self.wait_for_messages(msgs, msg_ids, None, None, p_dut, s_dut,
-                           payload_size == self.PAYLOAD_SIZE_MIN)
+        msgs = {}
+        msg_ids = {}
+        for i in range(
+                self.NUM_MSGS_QUEUE_DEPTH_MULT * p_dut.
+                aware_capabilities[aconsts.CAP_MAX_QUEUED_TRANSMIT_MESSAGES]):
+            msg = self.create_msg(p_dut.aware_capabilities, payload_size,
+                                  1000 + i)
+            msg_id = self.get_next_msg_id()
+            msgs[msg] = 0
+            msg_ids[msg_id] = 0
+            p_dut.droid.wifiAwareSendMessage(p_disc_id, peer_id_on_pub, msg_id,
+                                             msg, 0)
+        self.wait_for_messages(msgs, msg_ids, None, None, p_dut, s_dut,
+                               payload_size == self.PAYLOAD_SIZE_MIN)
 
-    # verify there are no more events
-    time.sleep(autils.EVENT_TIMEOUT)
-    autils.verify_no_more_events(p_dut, timeout=0)
-    autils.verify_no_more_events(s_dut, timeout=0)
+        # verify there are no more events
+        time.sleep(autils.EVENT_TIMEOUT)
+        autils.verify_no_more_events(p_dut, timeout=0)
+        autils.verify_no_more_events(s_dut, timeout=0)
 
-  def run_message_multi_session_with_queue(self, payload_size):
-    """Validate L2 message exchange between publishers & subscribers with
+    def run_message_multi_session_with_queue(self, payload_size):
+        """Validate L2 message exchange between publishers & subscribers with
     queueing - i.e. transmit all messages and then wait for ACKs. Uses 2
     discovery sessions running concurrently and validates that messages
     arrive at the correct destination.
@@ -321,125 +335,128 @@
     Args:
       payload_size: min, typical, or max (PAYLOAD_SIZE_xx)
     """
-    discovery_info1 = self.prep_message_exchange(extra_diff="-111")
-    p_dut = discovery_info1["p_dut"] # same for both sessions
-    s_dut = discovery_info1["s_dut"] # same for both sessions
-    p_disc_id1 = discovery_info1["p_disc_id"]
-    s_disc_id1 = discovery_info1["s_disc_id"]
-    peer_id_on_sub1 = discovery_info1["peer_id_on_sub"]
+        discovery_info1 = self.prep_message_exchange(extra_diff="-111")
+        p_dut = discovery_info1["p_dut"]  # same for both sessions
+        s_dut = discovery_info1["s_dut"]  # same for both sessions
+        p_disc_id1 = discovery_info1["p_disc_id"]
+        s_disc_id1 = discovery_info1["s_disc_id"]
+        peer_id_on_sub1 = discovery_info1["peer_id_on_sub"]
 
-    discovery_info2 = self.prep_message_exchange(extra_diff="-222")
-    p_disc_id2 = discovery_info2["p_disc_id"]
-    s_disc_id2 = discovery_info2["s_disc_id"]
-    peer_id_on_sub2 = discovery_info2["peer_id_on_sub"]
+        discovery_info2 = self.prep_message_exchange(extra_diff="-222")
+        p_disc_id2 = discovery_info2["p_disc_id"]
+        s_disc_id2 = discovery_info2["s_disc_id"]
+        peer_id_on_sub2 = discovery_info2["peer_id_on_sub"]
 
-    msgs1 = {}
-    msg_ids1 = {}
-    msgs2 = {}
-    msg_ids2 = {}
-    for i in range(
-            self.NUM_MSGS_QUEUE_DEPTH_MULT *
-            s_dut.aware_capabilities[aconsts.CAP_MAX_QUEUED_TRANSMIT_MESSAGES]):
-      msg1 = self.create_msg(s_dut.aware_capabilities, payload_size, i)
-      msg_id1 = self.get_next_msg_id()
-      msgs1[msg1] = 0
-      msg_ids1[msg_id1] = 0
-      s_dut.droid.wifiAwareSendMessage(s_disc_id1, peer_id_on_sub1, msg_id1,
-                                       msg1, 0)
-      msg2 = self.create_msg(s_dut.aware_capabilities, payload_size, 100 + i)
-      msg_id2 = self.get_next_msg_id()
-      msgs2[msg2] = 0
-      msg_ids2[msg_id2] = 0
-      s_dut.droid.wifiAwareSendMessage(s_disc_id2, peer_id_on_sub2, msg_id2,
-                                       msg2, 0)
+        msgs1 = {}
+        msg_ids1 = {}
+        msgs2 = {}
+        msg_ids2 = {}
+        for i in range(
+                self.NUM_MSGS_QUEUE_DEPTH_MULT * s_dut.
+                aware_capabilities[aconsts.CAP_MAX_QUEUED_TRANSMIT_MESSAGES]):
+            msg1 = self.create_msg(s_dut.aware_capabilities, payload_size, i)
+            msg_id1 = self.get_next_msg_id()
+            msgs1[msg1] = 0
+            msg_ids1[msg_id1] = 0
+            s_dut.droid.wifiAwareSendMessage(s_disc_id1, peer_id_on_sub1,
+                                             msg_id1, msg1, 0)
+            msg2 = self.create_msg(s_dut.aware_capabilities, payload_size,
+                                   100 + i)
+            msg_id2 = self.get_next_msg_id()
+            msgs2[msg2] = 0
+            msg_ids2[msg_id2] = 0
+            s_dut.droid.wifiAwareSendMessage(s_disc_id2, peer_id_on_sub2,
+                                             msg_id2, msg2, 0)
 
-    peer_id_on_pub1 = self.wait_for_messages(
-        msgs1, msg_ids1, s_disc_id1, p_disc_id1, s_dut, p_dut,
-        payload_size == self.PAYLOAD_SIZE_MIN)
-    peer_id_on_pub2 = self.wait_for_messages(
-        msgs2, msg_ids2, s_disc_id2, p_disc_id2, s_dut, p_dut,
-        payload_size == self.PAYLOAD_SIZE_MIN)
+        peer_id_on_pub1 = self.wait_for_messages(
+            msgs1, msg_ids1, s_disc_id1, p_disc_id1, s_dut, p_dut,
+            payload_size == self.PAYLOAD_SIZE_MIN)
+        peer_id_on_pub2 = self.wait_for_messages(
+            msgs2, msg_ids2, s_disc_id2, p_disc_id2, s_dut, p_dut,
+            payload_size == self.PAYLOAD_SIZE_MIN)
 
-    msgs1 = {}
-    msg_ids1 = {}
-    msgs2 = {}
-    msg_ids2 = {}
-    for i in range(
-            self.NUM_MSGS_QUEUE_DEPTH_MULT *
-            p_dut.aware_capabilities[aconsts.CAP_MAX_QUEUED_TRANSMIT_MESSAGES]):
-      msg1 = self.create_msg(p_dut.aware_capabilities, payload_size, 1000 + i)
-      msg_id1 = self.get_next_msg_id()
-      msgs1[msg1] = 0
-      msg_ids1[msg_id1] = 0
-      p_dut.droid.wifiAwareSendMessage(p_disc_id1, peer_id_on_pub1, msg_id1,
-                                       msg1, 0)
-      msg2 = self.create_msg(p_dut.aware_capabilities, payload_size, 1100 + i)
-      msg_id2 = self.get_next_msg_id()
-      msgs2[msg2] = 0
-      msg_ids2[msg_id2] = 0
-      p_dut.droid.wifiAwareSendMessage(p_disc_id2, peer_id_on_pub2, msg_id2,
-                                       msg2, 0)
+        msgs1 = {}
+        msg_ids1 = {}
+        msgs2 = {}
+        msg_ids2 = {}
+        for i in range(
+                self.NUM_MSGS_QUEUE_DEPTH_MULT * p_dut.
+                aware_capabilities[aconsts.CAP_MAX_QUEUED_TRANSMIT_MESSAGES]):
+            msg1 = self.create_msg(p_dut.aware_capabilities, payload_size,
+                                   1000 + i)
+            msg_id1 = self.get_next_msg_id()
+            msgs1[msg1] = 0
+            msg_ids1[msg_id1] = 0
+            p_dut.droid.wifiAwareSendMessage(p_disc_id1, peer_id_on_pub1,
+                                             msg_id1, msg1, 0)
+            msg2 = self.create_msg(p_dut.aware_capabilities, payload_size,
+                                   1100 + i)
+            msg_id2 = self.get_next_msg_id()
+            msgs2[msg2] = 0
+            msg_ids2[msg_id2] = 0
+            p_dut.droid.wifiAwareSendMessage(p_disc_id2, peer_id_on_pub2,
+                                             msg_id2, msg2, 0)
 
-    self.wait_for_messages(msgs1, msg_ids1, p_disc_id1, s_disc_id1, p_dut,
-                           s_dut, payload_size == self.PAYLOAD_SIZE_MIN)
-    self.wait_for_messages(msgs2, msg_ids2, p_disc_id2, s_disc_id2, p_dut,
-                           s_dut, payload_size == self.PAYLOAD_SIZE_MIN)
+        self.wait_for_messages(msgs1, msg_ids1, p_disc_id1, s_disc_id1, p_dut,
+                               s_dut, payload_size == self.PAYLOAD_SIZE_MIN)
+        self.wait_for_messages(msgs2, msg_ids2, p_disc_id2, s_disc_id2, p_dut,
+                               s_dut, payload_size == self.PAYLOAD_SIZE_MIN)
 
-    # verify there are no more events
-    time.sleep(autils.EVENT_TIMEOUT)
-    autils.verify_no_more_events(p_dut, timeout=0)
-    autils.verify_no_more_events(s_dut, timeout=0)
+        # verify there are no more events
+        time.sleep(autils.EVENT_TIMEOUT)
+        autils.verify_no_more_events(p_dut, timeout=0)
+        autils.verify_no_more_events(s_dut, timeout=0)
 
-  ############################################################################
+    ############################################################################
 
-  @test_tracker_info(uuid="a8cd0512-b279-425f-93cf-949ddba22c7a")
-  def test_message_no_queue_min(self):
-    """Functional / Message / No queue
+    @test_tracker_info(uuid="a8cd0512-b279-425f-93cf-949ddba22c7a")
+    def test_message_no_queue_min(self):
+        """Functional / Message / No queue
     - Minimal payload size (None or "")
     """
-    self.run_message_no_queue(self.PAYLOAD_SIZE_MIN)
+        self.run_message_no_queue(self.PAYLOAD_SIZE_MIN)
 
-  @test_tracker_info(uuid="2c26170a-5d0a-4cf4-b0b9-56ef03f5dcf4")
-  def test_message_no_queue_typical(self):
-    """Functional / Message / No queue
+    @test_tracker_info(uuid="2c26170a-5d0a-4cf4-b0b9-56ef03f5dcf4")
+    def test_message_no_queue_typical(self):
+        """Functional / Message / No queue
     - Typical payload size
     """
-    self.run_message_no_queue(self.PAYLOAD_SIZE_TYPICAL)
+        self.run_message_no_queue(self.PAYLOAD_SIZE_TYPICAL)
 
-  @test_tracker_info(uuid="c984860c-b62d-4d9b-8bce-4d894ea3bfbe")
-  def test_message_no_queue_max(self):
-    """Functional / Message / No queue
+    @test_tracker_info(uuid="c984860c-b62d-4d9b-8bce-4d894ea3bfbe")
+    def test_message_no_queue_max(self):
+        """Functional / Message / No queue
     - Max payload size (based on device capabilities)
     """
-    self.run_message_no_queue(self.PAYLOAD_SIZE_MAX)
+        self.run_message_no_queue(self.PAYLOAD_SIZE_MAX)
 
-  @test_tracker_info(uuid="3f06de73-31ab-4e0c-bc6f-59abdaf87f4f")
-  def test_message_with_queue_min(self):
-    """Functional / Message / With queue
+    @test_tracker_info(uuid="3f06de73-31ab-4e0c-bc6f-59abdaf87f4f")
+    def test_message_with_queue_min(self):
+        """Functional / Message / With queue
     - Minimal payload size (none or "")
     """
-    self.run_message_with_queue(self.PAYLOAD_SIZE_MIN)
+        self.run_message_with_queue(self.PAYLOAD_SIZE_MIN)
 
-  @test_tracker_info(uuid="9b7f5bd8-b0b1-479e-8e4b-9db0bb56767b")
-  def test_message_with_queue_typical(self):
-    """Functional / Message / With queue
+    @test_tracker_info(uuid="9b7f5bd8-b0b1-479e-8e4b-9db0bb56767b")
+    def test_message_with_queue_typical(self):
+        """Functional / Message / With queue
     - Typical payload size
     """
-    self.run_message_with_queue(self.PAYLOAD_SIZE_TYPICAL)
+        self.run_message_with_queue(self.PAYLOAD_SIZE_TYPICAL)
 
-  @test_tracker_info(uuid="4f9a6dce-3050-4e6a-a143-53592c6c7c28")
-  def test_message_with_queue_max(self):
-    """Functional / Message / With queue
+    @test_tracker_info(uuid="4f9a6dce-3050-4e6a-a143-53592c6c7c28")
+    def test_message_with_queue_max(self):
+        """Functional / Message / With queue
     - Max payload size (based on device capabilities)
     """
-    self.run_message_with_queue(self.PAYLOAD_SIZE_MAX)
+        self.run_message_with_queue(self.PAYLOAD_SIZE_MAX)
 
-  @test_tracker_info(uuid="4cece232-0983-4d6b-90a9-1bb9314b64f0")
-  def test_message_with_multiple_discovery_sessions_typical(self):
-    """Functional / Message / Multiple sessions
+    @test_tracker_info(uuid="4cece232-0983-4d6b-90a9-1bb9314b64f0")
+    def test_message_with_multiple_discovery_sessions_typical(self):
+        """Functional / Message / Multiple sessions
 
      Sets up 2 discovery sessions on 2 devices. Sends a message in each
      direction on each discovery session and verifies that reaches expected
      destination.
     """
-    self.run_message_multi_session_with_queue(self.PAYLOAD_SIZE_TYPICAL)
+        self.run_message_multi_session_with_queue(self.PAYLOAD_SIZE_TYPICAL)
diff --git a/acts/tests/google/wifi/aware/functional/NonConcurrencyTest.py b/acts/tests/google/wifi/aware/functional/NonConcurrencyTest.py
index d0b83cf..c418c69 100644
--- a/acts/tests/google/wifi/aware/functional/NonConcurrencyTest.py
+++ b/acts/tests/google/wifi/aware/functional/NonConcurrencyTest.py
@@ -22,94 +22,94 @@
 
 
 class NonConcurrencyTest(AwareBaseTest):
-  """Tests lack of concurrency scenarios Wi-Fi Aware with WFD (p2p) and
+    """Tests lack of concurrency scenarios Wi-Fi Aware with WFD (p2p) and
   SoftAP
 
   Note: these tests should be modified if the concurrency behavior changes!"""
 
-  SERVICE_NAME = "GoogleTestXYZ"
-  TETHER_SSID = "GoogleTestSoftApXYZ"
+    SERVICE_NAME = "GoogleTestXYZ"
+    TETHER_SSID = "GoogleTestSoftApXYZ"
 
-  def __init__(self, controllers):
-    AwareBaseTest.__init__(self, controllers)
+    def __init__(self, controllers):
+        AwareBaseTest.__init__(self, controllers)
 
-  def teardown_test(self):
-    AwareBaseTest.teardown_test(self)
-    for ad in self.android_devices:
-      ad.droid.wifiP2pClose()
+    def teardown_test(self):
+        AwareBaseTest.teardown_test(self)
+        for ad in self.android_devices:
+            ad.droid.wifiP2pClose()
 
-  def run_aware_then_incompat_service(self, is_p2p):
-    """Run test to validate that a running Aware session terminates when an
+    def run_aware_then_incompat_service(self, is_p2p):
+        """Run test to validate that a running Aware session terminates when an
     Aware-incompatible service is started.
 
     Args:
       is_p2p: True for p2p, False for SoftAP
     """
-    dut = self.android_devices[0]
+        dut = self.android_devices[0]
 
-    # start Aware
-    id = dut.droid.wifiAwareAttach()
-    autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
+        # start Aware
+        id = dut.droid.wifiAwareAttach()
+        autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
 
-    # start other service
-    if is_p2p:
-      dut.droid.wifiP2pInitialize()
-    else:
-      wutils.start_wifi_tethering(dut, self.TETHER_SSID, password=None)
+        # start other service
+        if is_p2p:
+            dut.droid.wifiP2pInitialize()
+        else:
+            wutils.start_wifi_tethering(dut, self.TETHER_SSID, password=None)
 
-    # expect an announcement about Aware non-availability
-    autils.wait_for_event(dut, aconsts.BROADCAST_WIFI_AWARE_NOT_AVAILABLE)
+        # expect an announcement about Aware non-availability
+        autils.wait_for_event(dut, aconsts.BROADCAST_WIFI_AWARE_NOT_AVAILABLE)
 
-    # local clean-up
-    if not is_p2p:
-      wutils.stop_wifi_tethering(dut)
+        # local clean-up
+        if not is_p2p:
+            wutils.stop_wifi_tethering(dut)
 
-  def run_incompat_service_then_aware(self, is_p2p):
-    """Validate that if an Aware-incompatible service is already up then any
+    def run_incompat_service_then_aware(self, is_p2p):
+        """Validate that if an Aware-incompatible service is already up then any
     Aware operation fails"""
-    dut = self.android_devices[0]
+        dut = self.android_devices[0]
 
-    # start other service
-    if is_p2p:
-      dut.droid.wifiP2pInitialize()
-    else:
-      wutils.start_wifi_tethering(dut, self.TETHER_SSID, password=None)
+        # start other service
+        if is_p2p:
+            dut.droid.wifiP2pInitialize()
+        else:
+            wutils.start_wifi_tethering(dut, self.TETHER_SSID, password=None)
 
-    # expect an announcement about Aware non-availability
-    autils.wait_for_event(dut, aconsts.BROADCAST_WIFI_AWARE_NOT_AVAILABLE)
+        # expect an announcement about Aware non-availability
+        autils.wait_for_event(dut, aconsts.BROADCAST_WIFI_AWARE_NOT_AVAILABLE)
 
-    # try starting anyway (expect failure)
-    dut.droid.wifiAwareAttach()
-    autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACH_FAILED)
+        # try starting anyway (expect failure)
+        dut.droid.wifiAwareAttach()
+        autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACH_FAILED)
 
-    # stop other service
-    if is_p2p:
-      dut.droid.wifiP2pClose()
-    else:
-      wutils.stop_wifi_tethering(dut)
+        # stop other service
+        if is_p2p:
+            dut.droid.wifiP2pClose()
+        else:
+            wutils.stop_wifi_tethering(dut)
 
-    # expect an announcement about Aware availability
-    autils.wait_for_event(dut, aconsts.BROADCAST_WIFI_AWARE_AVAILABLE)
+        # expect an announcement about Aware availability
+        autils.wait_for_event(dut, aconsts.BROADCAST_WIFI_AWARE_AVAILABLE)
 
-    # try starting Aware
-    dut.droid.wifiAwareAttach()
-    autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
+        # try starting Aware
+        dut.droid.wifiAwareAttach()
+        autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
 
-  ##########################################################################
+    ##########################################################################
 
-  def test_run_p2p_then_aware(self):
-    """Validate that if p2p is already up then any Aware operation fails"""
-    self.run_incompat_service_then_aware(is_p2p=True)
+    def test_run_p2p_then_aware(self):
+        """Validate that if p2p is already up then any Aware operation fails"""
+        self.run_incompat_service_then_aware(is_p2p=True)
 
-  def test_run_aware_then_p2p(self):
-    """Validate that a running Aware session terminates when p2p is started"""
-    self.run_aware_then_incompat_service(is_p2p=True)
+    def test_run_aware_then_p2p(self):
+        """Validate that a running Aware session terminates when p2p is started"""
+        self.run_aware_then_incompat_service(is_p2p=True)
 
-  def test_run_softap_then_aware(self):
-    """Validate that if SoftAp is already up then any Aware operation fails"""
-    self.run_incompat_service_then_aware(is_p2p=False)
+    def test_run_softap_then_aware(self):
+        """Validate that if SoftAp is already up then any Aware operation fails"""
+        self.run_incompat_service_then_aware(is_p2p=False)
 
-  def test_run_aware_then_softap(self):
-    """Validate that a running Aware session terminates when softAp is
+    def test_run_aware_then_softap(self):
+        """Validate that a running Aware session terminates when softAp is
     started"""
-    self.run_aware_then_incompat_service(is_p2p=False)
+        self.run_aware_then_incompat_service(is_p2p=False)
diff --git a/acts/tests/google/wifi/aware/functional/ProtocolsTest.py b/acts/tests/google/wifi/aware/functional/ProtocolsTest.py
index 97a61b6..2e26d22 100644
--- a/acts/tests/google/wifi/aware/functional/ProtocolsTest.py
+++ b/acts/tests/google/wifi/aware/functional/ProtocolsTest.py
@@ -23,233 +23,243 @@
 
 
 class ProtocolsTest(AwareBaseTest):
-  """Set of tests for Wi-Fi Aware data-paths: validating protocols running on
+    """Set of tests for Wi-Fi Aware data-paths: validating protocols running on
   top of a data-path"""
 
-  SERVICE_NAME = "GoogleTestServiceXY"
+    SERVICE_NAME = "GoogleTestServiceXY"
 
-  def __init__(self, controllers):
-    AwareBaseTest.__init__(self, controllers)
+    def __init__(self, controllers):
+        AwareBaseTest.__init__(self, controllers)
 
-  def run_ping6(self, dut, peer_ipv6, dut_if):
-    """Run a ping6 over the specified device/link
+    def run_ping6(self, dut, peer_ipv6, dut_if):
+        """Run a ping6 over the specified device/link
 
     Args:
       dut: Device on which to execute ping6
       peer_ipv6: IPv6 address of the peer to ping
       dut_if: interface name on the dut
     """
-    cmd = "ping6 -c 3 -W 5 %s%%%s" % (peer_ipv6, dut_if)
-    results = dut.adb.shell(cmd)
-    self.log.info("cmd='%s' -> '%s'", cmd, results)
-    if results == "":
-      asserts.fail("ping6 empty results - seems like a failure")
+        cmd = "ping6 -c 3 -W 5 %s%%%s" % (peer_ipv6, dut_if)
+        results = dut.adb.shell(cmd)
+        self.log.info("cmd='%s' -> '%s'", cmd, results)
+        if results == "":
+            asserts.fail("ping6 empty results - seems like a failure")
 
-  ########################################################################
+    ########################################################################
 
-  @test_tracker_info(uuid="ce103067-7fdd-4379-9a2b-d238959f1d53")
-  def test_ping6_oob(self):
-    """Validate that ping6 works correctly on an NDP created using OOB (out-of
+    @test_tracker_info(uuid="ce103067-7fdd-4379-9a2b-d238959f1d53")
+    def test_ping6_oob(self):
+        """Validate that ping6 works correctly on an NDP created using OOB (out-of
     band) discovery"""
-    init_dut = self.android_devices[0]
-    resp_dut = self.android_devices[1]
+        init_dut = self.android_devices[0]
+        resp_dut = self.android_devices[1]
 
-    # create NDP
-    (init_req_key, resp_req_key, init_aware_if, resp_aware_if, init_ipv6,
-     resp_ipv6) = autils.create_oob_ndp(init_dut, resp_dut)
-    self.log.info("Interface names: I=%s, R=%s", init_aware_if, resp_aware_if)
-    self.log.info("Interface addresses (IPv6): I=%s, R=%s", init_ipv6,
-                  resp_ipv6)
+        # create NDP
+        (init_req_key, resp_req_key, init_aware_if, resp_aware_if, init_ipv6,
+         resp_ipv6) = autils.create_oob_ndp(init_dut, resp_dut)
+        self.log.info("Interface names: I=%s, R=%s", init_aware_if,
+                      resp_aware_if)
+        self.log.info("Interface addresses (IPv6): I=%s, R=%s", init_ipv6,
+                      resp_ipv6)
 
-    # run ping6
-    self.run_ping6(init_dut, resp_ipv6, init_aware_if)
-    self.run_ping6(resp_dut, init_ipv6, resp_aware_if)
+        # run ping6
+        self.run_ping6(init_dut, resp_ipv6, init_aware_if)
+        self.run_ping6(resp_dut, init_ipv6, resp_aware_if)
 
-    # clean-up
-    resp_dut.droid.connectivityUnregisterNetworkCallback(resp_req_key)
-    init_dut.droid.connectivityUnregisterNetworkCallback(init_req_key)
+        # clean-up
+        resp_dut.droid.connectivityUnregisterNetworkCallback(resp_req_key)
+        init_dut.droid.connectivityUnregisterNetworkCallback(init_req_key)
 
-  @test_tracker_info(uuid="fef86a48-0e05-464b-8c66-64316275c5ba")
-  def test_ping6_ib_unsolicited_passive(self):
-    """Validate that ping6 works correctly on an NDP created using Aware
+    @test_tracker_info(uuid="fef86a48-0e05-464b-8c66-64316275c5ba")
+    def test_ping6_ib_unsolicited_passive(self):
+        """Validate that ping6 works correctly on an NDP created using Aware
     discovery with UNSOLICITED/PASSIVE sessions."""
-    p_dut = self.android_devices[0]
-    s_dut = self.android_devices[1]
+        p_dut = self.android_devices[0]
+        s_dut = self.android_devices[1]
 
-    # create NDP
-    (p_req_key, s_req_key, p_aware_if, s_aware_if, p_ipv6,
-     s_ipv6) = autils.create_ib_ndp(
-         p_dut,
-         s_dut,
-         p_config=autils.create_discovery_config(
-             self.SERVICE_NAME, aconsts.PUBLISH_TYPE_UNSOLICITED),
-         s_config=autils.create_discovery_config(
-             self.SERVICE_NAME, aconsts.SUBSCRIBE_TYPE_PASSIVE),
-         device_startup_offset=self.device_startup_offset)
-    self.log.info("Interface names: P=%s, S=%s", p_aware_if, s_aware_if)
-    self.log.info("Interface addresses (IPv6): P=%s, S=%s", p_ipv6, s_ipv6)
+        # create NDP
+        (p_req_key, s_req_key, p_aware_if, s_aware_if, p_ipv6,
+         s_ipv6) = autils.create_ib_ndp(
+             p_dut,
+             s_dut,
+             p_config=autils.create_discovery_config(
+                 self.SERVICE_NAME, aconsts.PUBLISH_TYPE_UNSOLICITED),
+             s_config=autils.create_discovery_config(
+                 self.SERVICE_NAME, aconsts.SUBSCRIBE_TYPE_PASSIVE),
+             device_startup_offset=self.device_startup_offset)
+        self.log.info("Interface names: P=%s, S=%s", p_aware_if, s_aware_if)
+        self.log.info("Interface addresses (IPv6): P=%s, S=%s", p_ipv6, s_ipv6)
 
-    # run ping6
-    self.run_ping6(p_dut, s_ipv6, p_aware_if)
-    self.run_ping6(s_dut, p_ipv6, s_aware_if)
+        # run ping6
+        self.run_ping6(p_dut, s_ipv6, p_aware_if)
+        self.run_ping6(s_dut, p_ipv6, s_aware_if)
 
-    # clean-up
-    p_dut.droid.connectivityUnregisterNetworkCallback(p_req_key)
-    s_dut.droid.connectivityUnregisterNetworkCallback(s_req_key)
+        # clean-up
+        p_dut.droid.connectivityUnregisterNetworkCallback(p_req_key)
+        s_dut.droid.connectivityUnregisterNetworkCallback(s_req_key)
 
-  @test_tracker_info(uuid="5bbd68a9-994b-4c26-88cd-43388cec280b")
-  def test_ping6_ib_solicited_active(self):
-    """Validate that ping6 works correctly on an NDP created using Aware
+    @test_tracker_info(uuid="5bbd68a9-994b-4c26-88cd-43388cec280b")
+    def test_ping6_ib_solicited_active(self):
+        """Validate that ping6 works correctly on an NDP created using Aware
     discovery with SOLICITED/ACTIVE sessions."""
-    p_dut = self.android_devices[0]
-    s_dut = self.android_devices[1]
+        p_dut = self.android_devices[0]
+        s_dut = self.android_devices[1]
 
-    # create NDP
-    (p_req_key, s_req_key, p_aware_if, s_aware_if, p_ipv6,
-     s_ipv6) = autils.create_ib_ndp(
-         p_dut,
-         s_dut,
-         p_config=autils.create_discovery_config(
-             self.SERVICE_NAME, aconsts.PUBLISH_TYPE_SOLICITED),
-         s_config=autils.create_discovery_config(self.SERVICE_NAME,
-                                                 aconsts.SUBSCRIBE_TYPE_ACTIVE),
-         device_startup_offset=self.device_startup_offset)
-    self.log.info("Interface names: P=%s, S=%s", p_aware_if, s_aware_if)
-    self.log.info("Interface addresses (IPv6): P=%s, S=%s", p_ipv6, s_ipv6)
+        # create NDP
+        (p_req_key, s_req_key, p_aware_if, s_aware_if, p_ipv6,
+         s_ipv6) = autils.create_ib_ndp(
+             p_dut,
+             s_dut,
+             p_config=autils.create_discovery_config(
+                 self.SERVICE_NAME, aconsts.PUBLISH_TYPE_SOLICITED),
+             s_config=autils.create_discovery_config(
+                 self.SERVICE_NAME, aconsts.SUBSCRIBE_TYPE_ACTIVE),
+             device_startup_offset=self.device_startup_offset)
+        self.log.info("Interface names: P=%s, S=%s", p_aware_if, s_aware_if)
+        self.log.info("Interface addresses (IPv6): P=%s, S=%s", p_ipv6, s_ipv6)
 
-    # run ping6
-    self.run_ping6(p_dut, s_ipv6, p_aware_if)
-    self.run_ping6(s_dut, p_ipv6, s_aware_if)
+        # run ping6
+        self.run_ping6(p_dut, s_ipv6, p_aware_if)
+        self.run_ping6(s_dut, p_ipv6, s_aware_if)
 
-    # clean-up
-    p_dut.droid.connectivityUnregisterNetworkCallback(p_req_key)
-    s_dut.droid.connectivityUnregisterNetworkCallback(s_req_key)
+        # clean-up
+        p_dut.droid.connectivityUnregisterNetworkCallback(p_req_key)
+        s_dut.droid.connectivityUnregisterNetworkCallback(s_req_key)
 
-  def test_ping6_oob_max_ndp(self):
-    """Validate that ping6 works correctly on multiple NDPs brought up
+    def test_ping6_oob_max_ndp(self):
+        """Validate that ping6 works correctly on multiple NDPs brought up
     concurrently. Uses the capability of the device to determine the max
     number of NDPs to set up.
 
     Note: the test requires MAX_NDP + 1 devices to be validated. If these are
     not available the test will fail."""
-    dut = self.android_devices[0]
+        dut = self.android_devices[0]
 
-    # get max NDP: using first available device (assumes all devices are the
-    # same)
-    max_ndp = dut.aware_capabilities[aconsts.CAP_MAX_NDP_SESSIONS]
-    asserts.assert_true(len(self.android_devices) > max_ndp,
-                        'Needed %d devices to run the test, have %d' %
-                        (max_ndp + 1, len(self.android_devices)))
+        # get max NDP: using first available device (assumes all devices are the
+        # same)
+        max_ndp = dut.aware_capabilities[aconsts.CAP_MAX_NDP_SESSIONS]
+        asserts.assert_true(
+            len(self.android_devices) > max_ndp,
+            'Needed %d devices to run the test, have %d' %
+            (max_ndp + 1, len(self.android_devices)))
 
-    # create all NDPs
-    dut_aware_if = None
-    dut_ipv6 = None
-    peers_aware_ifs = []
-    peers_ipv6s = []
-    dut_requests = []
-    peers_requests = []
-    for i in range(max_ndp):
-      (init_req_key, resp_req_key, init_aware_if, resp_aware_if, init_ipv6,
-       resp_ipv6) = autils.create_oob_ndp(dut, self.android_devices[i + 1])
-      self.log.info("Interface names: I=%s, R=%s", init_aware_if, resp_aware_if)
-      self.log.info("Interface addresses (IPv6): I=%s, R=%s", init_ipv6,
-                    resp_ipv6)
+        # create all NDPs
+        dut_aware_if = None
+        dut_ipv6 = None
+        peers_aware_ifs = []
+        peers_ipv6s = []
+        dut_requests = []
+        peers_requests = []
+        for i in range(max_ndp):
+            (init_req_key, resp_req_key, init_aware_if, resp_aware_if,
+             init_ipv6, resp_ipv6) = autils.create_oob_ndp(
+                 dut, self.android_devices[i + 1])
+            self.log.info("Interface names: I=%s, R=%s", init_aware_if,
+                          resp_aware_if)
+            self.log.info("Interface addresses (IPv6): I=%s, R=%s", init_ipv6,
+                          resp_ipv6)
 
-      dut_requests.append(init_req_key)
-      peers_requests.append(resp_req_key)
-      if dut_aware_if is None:
-        dut_aware_if = init_aware_if
-      else:
-        asserts.assert_equal(
-            dut_aware_if, init_aware_if,
-            "DUT (Initiator) interface changed on subsequent NDPs!?")
-      if dut_ipv6 is None:
-        dut_ipv6 = init_ipv6
-      else:
-        asserts.assert_equal(
-            dut_ipv6, init_ipv6,
-            "DUT (Initiator) IPv6 changed on subsequent NDPs!?")
-      peers_aware_ifs.append(resp_aware_if)
-      peers_ipv6s.append(resp_ipv6)
+            dut_requests.append(init_req_key)
+            peers_requests.append(resp_req_key)
+            if dut_aware_if is None:
+                dut_aware_if = init_aware_if
+            else:
+                asserts.assert_equal(
+                    dut_aware_if, init_aware_if,
+                    "DUT (Initiator) interface changed on subsequent NDPs!?")
+            if dut_ipv6 is None:
+                dut_ipv6 = init_ipv6
+            else:
+                asserts.assert_equal(
+                    dut_ipv6, init_ipv6,
+                    "DUT (Initiator) IPv6 changed on subsequent NDPs!?")
+            peers_aware_ifs.append(resp_aware_if)
+            peers_ipv6s.append(resp_ipv6)
 
-    # run ping6
-    for i in range(max_ndp):
-      self.run_ping6(dut, peers_ipv6s[i], dut_aware_if)
-      self.run_ping6(self.android_devices[i + 1], dut_ipv6, peers_aware_ifs[i])
+        # run ping6
+        for i in range(max_ndp):
+            self.run_ping6(dut, peers_ipv6s[i], dut_aware_if)
+            self.run_ping6(self.android_devices[i + 1], dut_ipv6,
+                           peers_aware_ifs[i])
 
-    # cleanup
-    for i in range(max_ndp):
-      dut.droid.connectivityUnregisterNetworkCallback(dut_requests[i])
-      self.android_devices[i + 1].droid.connectivityUnregisterNetworkCallback(
-          peers_requests[i])
+        # cleanup
+        for i in range(max_ndp):
+            dut.droid.connectivityUnregisterNetworkCallback(dut_requests[i])
+            self.android_devices[
+                i + 1].droid.connectivityUnregisterNetworkCallback(
+                    peers_requests[i])
 
-  def test_nsd_oob(self):
-    """Validate that NSD (mDNS) works correctly on an NDP created using OOB
+    def test_nsd_oob(self):
+        """Validate that NSD (mDNS) works correctly on an NDP created using OOB
     (out-of band) discovery"""
-    init_dut = self.android_devices[0]
-    resp_dut = self.android_devices[1]
+        init_dut = self.android_devices[0]
+        resp_dut = self.android_devices[1]
 
-    # create NDP
-    (init_req_key, resp_req_key, init_aware_if, resp_aware_if, init_ipv6,
-     resp_ipv6) = autils.create_oob_ndp(init_dut, resp_dut)
-    self.log.info("Interface names: I=%s, R=%s", init_aware_if, resp_aware_if)
-    self.log.info("Interface addresses (IPv6): I=%s, R=%s", init_ipv6,
-                  resp_ipv6)
+        # create NDP
+        (init_req_key, resp_req_key, init_aware_if, resp_aware_if, init_ipv6,
+         resp_ipv6) = autils.create_oob_ndp(init_dut, resp_dut)
+        self.log.info("Interface names: I=%s, R=%s", init_aware_if,
+                      resp_aware_if)
+        self.log.info("Interface addresses (IPv6): I=%s, R=%s", init_ipv6,
+                      resp_ipv6)
 
-    # run NSD
-    nsd_service_info = {
-        "serviceInfoServiceName": "sl4aTestAwareNsd",
-        "serviceInfoServiceType": "_simple-tx-rx._tcp.",
-        "serviceInfoPort": 2257
-    }
-    nsd_reg = None
-    nsd_discovery = None
-    try:
-      # Initiator registers an NSD service
-      nsd_reg = init_dut.droid.nsdRegisterService(nsd_service_info)
-      event_nsd = autils.wait_for_event_with_keys(
-          init_dut, nconsts.REG_LISTENER_EVENT, autils.EVENT_TIMEOUT,
-          (nconsts.REG_LISTENER_CALLBACK,
-           nconsts.REG_LISTENER_EVENT_ON_SERVICE_REGISTERED))
-      self.log.info("Initiator %s: %s",
-                    nconsts.REG_LISTENER_EVENT_ON_SERVICE_REGISTERED,
-                    event_nsd["data"])
+        # run NSD
+        nsd_service_info = {
+            "serviceInfoServiceName": "sl4aTestAwareNsd",
+            "serviceInfoServiceType": "_simple-tx-rx._tcp.",
+            "serviceInfoPort": 2257
+        }
+        nsd_reg = None
+        nsd_discovery = None
+        try:
+            # Initiator registers an NSD service
+            nsd_reg = init_dut.droid.nsdRegisterService(nsd_service_info)
+            event_nsd = autils.wait_for_event_with_keys(
+                init_dut, nconsts.REG_LISTENER_EVENT, autils.EVENT_TIMEOUT,
+                (nconsts.REG_LISTENER_CALLBACK,
+                 nconsts.REG_LISTENER_EVENT_ON_SERVICE_REGISTERED))
+            self.log.info("Initiator %s: %s",
+                          nconsts.REG_LISTENER_EVENT_ON_SERVICE_REGISTERED,
+                          event_nsd["data"])
 
-      # Responder starts an NSD discovery
-      nsd_discovery = resp_dut.droid.nsdDiscoverServices(
-          nsd_service_info[nconsts.NSD_SERVICE_INFO_SERVICE_TYPE])
-      event_nsd = autils.wait_for_event_with_keys(
-          resp_dut, nconsts.DISCOVERY_LISTENER_EVENT, autils.EVENT_TIMEOUT,
-          (nconsts.DISCOVERY_LISTENER_DATA_CALLBACK,
-           nconsts.DISCOVERY_LISTENER_EVENT_ON_SERVICE_FOUND))
-      self.log.info("Responder %s: %s",
-                    nconsts.DISCOVERY_LISTENER_EVENT_ON_SERVICE_FOUND,
-                    event_nsd["data"])
+            # Responder starts an NSD discovery
+            nsd_discovery = resp_dut.droid.nsdDiscoverServices(
+                nsd_service_info[nconsts.NSD_SERVICE_INFO_SERVICE_TYPE])
+            event_nsd = autils.wait_for_event_with_keys(
+                resp_dut, nconsts.DISCOVERY_LISTENER_EVENT,
+                autils.EVENT_TIMEOUT,
+                (nconsts.DISCOVERY_LISTENER_DATA_CALLBACK,
+                 nconsts.DISCOVERY_LISTENER_EVENT_ON_SERVICE_FOUND))
+            self.log.info("Responder %s: %s",
+                          nconsts.DISCOVERY_LISTENER_EVENT_ON_SERVICE_FOUND,
+                          event_nsd["data"])
 
-      # Responder resolves IP address of Initiator from NSD service discovery
-      resp_dut.droid.nsdResolveService(event_nsd["data"])
-      event_nsd = autils.wait_for_event_with_keys(
-          resp_dut, nconsts.RESOLVE_LISTENER_EVENT, autils.EVENT_TIMEOUT,
-          (nconsts.RESOLVE_LISTENER_DATA_CALLBACK,
-           nconsts.RESOLVE_LISTENER_EVENT_ON_SERVICE_RESOLVED))
-      self.log.info("Responder %s: %s",
-                    nconsts.RESOLVE_LISTENER_EVENT_ON_SERVICE_RESOLVED,
-                    event_nsd["data"])
+            # Responder resolves IP address of Initiator from NSD service discovery
+            resp_dut.droid.nsdResolveService(event_nsd["data"])
+            event_nsd = autils.wait_for_event_with_keys(
+                resp_dut, nconsts.RESOLVE_LISTENER_EVENT, autils.EVENT_TIMEOUT,
+                (nconsts.RESOLVE_LISTENER_DATA_CALLBACK,
+                 nconsts.RESOLVE_LISTENER_EVENT_ON_SERVICE_RESOLVED))
+            self.log.info("Responder %s: %s",
+                          nconsts.RESOLVE_LISTENER_EVENT_ON_SERVICE_RESOLVED,
+                          event_nsd["data"])
 
-      # mDNS returns first character as '/' - strip
-      # out to get clean IPv6
-      init_ipv6_nsd = event_nsd["data"][nconsts.NSD_SERVICE_INFO_HOST][1:]
+            # mDNS returns first character as '/' - strip
+            # out to get clean IPv6
+            init_ipv6_nsd = event_nsd["data"][nconsts.NSD_SERVICE_INFO_HOST][
+                1:]
 
-      asserts.assert_equal(
-          init_ipv6, init_ipv6_nsd,
-          "Initiator's IPv6 address obtained through NSD doesn't match!?")
-    finally:
-      # Stop NSD
-      if nsd_reg is not None:
-        init_dut.droid.nsdUnregisterService(nsd_reg)
-      if nsd_discovery is not None:
-        resp_dut.droid.nsdStopServiceDiscovery(nsd_discovery)
+            asserts.assert_equal(
+                init_ipv6, init_ipv6_nsd,
+                "Initiator's IPv6 address obtained through NSD doesn't match!?"
+            )
+        finally:
+            # Stop NSD
+            if nsd_reg is not None:
+                init_dut.droid.nsdUnregisterService(nsd_reg)
+            if nsd_discovery is not None:
+                resp_dut.droid.nsdStopServiceDiscovery(nsd_discovery)
 
-    # clean-up
-    resp_dut.droid.connectivityUnregisterNetworkCallback(resp_req_key)
-    init_dut.droid.connectivityUnregisterNetworkCallback(init_req_key)
+        # clean-up
+        resp_dut.droid.connectivityUnregisterNetworkCallback(resp_req_key)
+        init_dut.droid.connectivityUnregisterNetworkCallback(init_req_key)
diff --git a/acts/tests/google/wifi/aware/ota/ServiceIdsTest.py b/acts/tests/google/wifi/aware/ota/ServiceIdsTest.py
index 1137303..9691305 100644
--- a/acts/tests/google/wifi/aware/ota/ServiceIdsTest.py
+++ b/acts/tests/google/wifi/aware/ota/ServiceIdsTest.py
@@ -23,18 +23,18 @@
 
 
 class ServiceIdsTest(AwareBaseTest):
-  """Set of tests for Wi-Fi Aware to verify that beacons include service IDs
+    """Set of tests for Wi-Fi Aware to verify that beacons include service IDs
   for discovery.
 
   Note: this test is an OTA (over-the-air) and requires a Sniffer.
   """
 
-  def __init__(self, controllers):
-    AwareBaseTest.__init__(self, controllers)
+    def __init__(self, controllers):
+        AwareBaseTest.__init__(self, controllers)
 
-  def start_discovery_session(self, dut, session_id, is_publish, dtype,
-                              service_name):
-    """Start a discovery session
+    def start_discovery_session(self, dut, session_id, is_publish, dtype,
+                                service_name):
+        """Start a discovery session
 
     Args:
       dut: Device under test
@@ -46,55 +46,56 @@
     Returns:
       Discovery session ID.
     """
-    config = {}
-    config[aconsts.DISCOVERY_KEY_DISCOVERY_TYPE] = dtype
-    config[aconsts.DISCOVERY_KEY_SERVICE_NAME] = service_name
+        config = {}
+        config[aconsts.DISCOVERY_KEY_DISCOVERY_TYPE] = dtype
+        config[aconsts.DISCOVERY_KEY_SERVICE_NAME] = service_name
 
-    if is_publish:
-      disc_id = dut.droid.wifiAwarePublish(session_id, config)
-      event_name = aconsts.SESSION_CB_ON_PUBLISH_STARTED
-    else:
-      disc_id = dut.droid.wifiAwareSubscribe(session_id, config)
-      event_name = aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED
+        if is_publish:
+            disc_id = dut.droid.wifiAwarePublish(session_id, config)
+            event_name = aconsts.SESSION_CB_ON_PUBLISH_STARTED
+        else:
+            disc_id = dut.droid.wifiAwareSubscribe(session_id, config)
+            event_name = aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED
 
-    autils.wait_for_event(dut, event_name)
-    return disc_id
+        autils.wait_for_event(dut, event_name)
+        return disc_id
 
-  ####################################################################
+    ####################################################################
 
-  def test_service_ids_in_beacon(self):
-    """Verify that beacons include service IDs for both publish and subscribe
+    def test_service_ids_in_beacon(self):
+        """Verify that beacons include service IDs for both publish and subscribe
     sessions of all types: solicited/unsolicited/active/passive."""
-    dut = self.android_devices[0]
+        dut = self.android_devices[0]
 
-    self.log.info("Reminder: start a sniffer before running test")
+        self.log.info("Reminder: start a sniffer before running test")
 
-    # attach
-    session_id = dut.droid.wifiAwareAttach(True)
-    autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
-    ident_event = autils.wait_for_event(dut,
-                                        aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
-    mac = ident_event["data"]["mac"]
-    self.log.info("Source MAC Address of 'interesting' packets = %s", mac)
-    self.log.info("Wireshark filter = 'wlan.ta == %s:%s:%s:%s:%s:%s'", mac[0:2],
-                  mac[2:4], mac[4:6], mac[6:8], mac[8:10], mac[10:12])
+        # attach
+        session_id = dut.droid.wifiAwareAttach(True)
+        autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
+        ident_event = autils.wait_for_event(
+            dut, aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
+        mac = ident_event["data"]["mac"]
+        self.log.info("Source MAC Address of 'interesting' packets = %s", mac)
+        self.log.info("Wireshark filter = 'wlan.ta == %s:%s:%s:%s:%s:%s'",
+                      mac[0:2], mac[2:4], mac[4:6], mac[6:8], mac[8:10],
+                      mac[10:12])
 
-    time.sleep(5)  # get some samples pre-discovery
+        time.sleep(5)  # get some samples pre-discovery
 
-    # start 4 discovery session (one of each type)
-    self.start_discovery_session(dut, session_id, True,
-                                 aconsts.PUBLISH_TYPE_UNSOLICITED,
-                                 "GoogleTestService-Pub-Unsolicited")
-    self.start_discovery_session(dut, session_id, True,
-                                 aconsts.PUBLISH_TYPE_SOLICITED,
-                                 "GoogleTestService-Pub-Solicited")
-    self.start_discovery_session(dut, session_id, False,
-                                 aconsts.SUBSCRIBE_TYPE_ACTIVE,
-                                 "GoogleTestService-Sub-Active")
-    self.start_discovery_session(dut, session_id, False,
-                                 aconsts.SUBSCRIBE_TYPE_PASSIVE,
-                                 "GoogleTestService-Sub-Passive")
+        # start 4 discovery session (one of each type)
+        self.start_discovery_session(dut, session_id, True,
+                                     aconsts.PUBLISH_TYPE_UNSOLICITED,
+                                     "GoogleTestService-Pub-Unsolicited")
+        self.start_discovery_session(dut, session_id, True,
+                                     aconsts.PUBLISH_TYPE_SOLICITED,
+                                     "GoogleTestService-Pub-Solicited")
+        self.start_discovery_session(dut, session_id, False,
+                                     aconsts.SUBSCRIBE_TYPE_ACTIVE,
+                                     "GoogleTestService-Sub-Active")
+        self.start_discovery_session(dut, session_id, False,
+                                     aconsts.SUBSCRIBE_TYPE_PASSIVE,
+                                     "GoogleTestService-Sub-Passive")
 
-    time.sleep(15)  # get some samples while discovery is alive
+        time.sleep(15)  # get some samples while discovery is alive
 
-    self.log.info("Reminder: stop sniffer")
+        self.log.info("Reminder: stop sniffer")
diff --git a/acts/tests/google/wifi/aware/performance/LatencyTest.py b/acts/tests/google/wifi/aware/performance/LatencyTest.py
index bfadebc..db5d416 100644
--- a/acts/tests/google/wifi/aware/performance/LatencyTest.py
+++ b/acts/tests/google/wifi/aware/performance/LatencyTest.py
@@ -25,19 +25,19 @@
 
 
 class LatencyTest(AwareBaseTest):
-  """Set of tests for Wi-Fi Aware to measure latency of Aware operations."""
-  SERVICE_NAME = "GoogleTestServiceXY"
+    """Set of tests for Wi-Fi Aware to measure latency of Aware operations."""
+    SERVICE_NAME = "GoogleTestServiceXY"
 
-  # number of second to 'reasonably' wait to make sure that devices synchronize
-  # with each other - useful for OOB test cases, where the OOB discovery would
-  # take some time
-  WAIT_FOR_CLUSTER = 5
+    # number of second to 'reasonably' wait to make sure that devices synchronize
+    # with each other - useful for OOB test cases, where the OOB discovery would
+    # take some time
+    WAIT_FOR_CLUSTER = 5
 
-  def __init__(self, controllers):
-    AwareBaseTest.__init__(self, controllers)
+    def __init__(self, controllers):
+        AwareBaseTest.__init__(self, controllers)
 
-  def start_discovery_session(self, dut, session_id, is_publish, dtype):
-    """Start a discovery session
+    def start_discovery_session(self, dut, session_id, is_publish, dtype):
+        """Start a discovery session
 
     Args:
       dut: Device under test
@@ -48,24 +48,24 @@
     Returns:
       Discovery session started event.
     """
-    config = {}
-    config[aconsts.DISCOVERY_KEY_DISCOVERY_TYPE] = dtype
-    config[aconsts.DISCOVERY_KEY_SERVICE_NAME] = "GoogleTestServiceXY"
+        config = {}
+        config[aconsts.DISCOVERY_KEY_DISCOVERY_TYPE] = dtype
+        config[aconsts.DISCOVERY_KEY_SERVICE_NAME] = "GoogleTestServiceXY"
 
-    if is_publish:
-      disc_id = dut.droid.wifiAwarePublish(session_id, config)
-      event_name = aconsts.SESSION_CB_ON_PUBLISH_STARTED
-    else:
-      disc_id = dut.droid.wifiAwareSubscribe(session_id, config)
-      event_name = aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED
+        if is_publish:
+            disc_id = dut.droid.wifiAwarePublish(session_id, config)
+            event_name = aconsts.SESSION_CB_ON_PUBLISH_STARTED
+        else:
+            disc_id = dut.droid.wifiAwareSubscribe(session_id, config)
+            event_name = aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED
 
-    event = autils.wait_for_event(dut, event_name)
-    return disc_id, event
+        event = autils.wait_for_event(dut, event_name)
+        return disc_id, event
 
-  def run_synchronization_latency(self, results, do_unsolicited_passive,
-                                  dw_24ghz, dw_5ghz, num_iterations,
-                                  startup_offset, timeout_period):
-    """Run the synchronization latency test with the specified DW intervals.
+    def run_synchronization_latency(self, results, do_unsolicited_passive,
+                                    dw_24ghz, dw_5ghz, num_iterations,
+                                    startup_offset, timeout_period):
+        """Run the synchronization latency test with the specified DW intervals.
     There is no direct measure of synchronization. Instead starts a discovery
     session as soon as possible and measures both probability of discovery
     within a timeout period and the actual discovery time (not necessarily
@@ -80,80 +80,82 @@
       startup_offset: The start-up gap (in seconds) between the two devices
       timeout_period: Time period over which to measure synchronization
     """
-    key = "%s_dw24_%d_dw5_%d_offset_%d" % (
-        "unsolicited_passive" if do_unsolicited_passive else "solicited_active",
-        dw_24ghz, dw_5ghz, startup_offset)
-    results[key] = {}
-    results[key]["num_iterations"] = num_iterations
+        key = "%s_dw24_%d_dw5_%d_offset_%d" % ("unsolicited_passive"
+                                               if do_unsolicited_passive else
+                                               "solicited_active", dw_24ghz,
+                                               dw_5ghz, startup_offset)
+        results[key] = {}
+        results[key]["num_iterations"] = num_iterations
 
-    p_dut = self.android_devices[0]
-    p_dut.pretty_name = "Publisher"
-    s_dut = self.android_devices[1]
-    s_dut.pretty_name = "Subscriber"
+        p_dut = self.android_devices[0]
+        p_dut.pretty_name = "Publisher"
+        s_dut = self.android_devices[1]
+        s_dut.pretty_name = "Subscriber"
 
-    # override the default DW configuration
-    autils.config_power_settings(p_dut, dw_24ghz, dw_5ghz)
-    autils.config_power_settings(s_dut, dw_24ghz, dw_5ghz)
+        # override the default DW configuration
+        autils.config_power_settings(p_dut, dw_24ghz, dw_5ghz)
+        autils.config_power_settings(s_dut, dw_24ghz, dw_5ghz)
 
-    latencies = []
-    failed_discoveries = 0
-    for i in range(num_iterations):
-      # Publisher+Subscriber: attach and wait for confirmation
-      p_id = p_dut.droid.wifiAwareAttach(False)
-      autils.wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
-      time.sleep(startup_offset)
-      s_id = s_dut.droid.wifiAwareAttach(False)
-      autils.wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
+        latencies = []
+        failed_discoveries = 0
+        for i in range(num_iterations):
+            # Publisher+Subscriber: attach and wait for confirmation
+            p_id = p_dut.droid.wifiAwareAttach(False)
+            autils.wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
+            time.sleep(startup_offset)
+            s_id = s_dut.droid.wifiAwareAttach(False)
+            autils.wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
 
-      # start publish
-      p_disc_id, p_disc_event = self.start_discovery_session(
-          p_dut, p_id, True, aconsts.PUBLISH_TYPE_UNSOLICITED
-          if do_unsolicited_passive else aconsts.PUBLISH_TYPE_SOLICITED)
+            # start publish
+            p_disc_id, p_disc_event = self.start_discovery_session(
+                p_dut, p_id, True, aconsts.PUBLISH_TYPE_UNSOLICITED
+                if do_unsolicited_passive else aconsts.PUBLISH_TYPE_SOLICITED)
 
-      # start subscribe
-      s_disc_id, s_session_event = self.start_discovery_session(
-          s_dut, s_id, False, aconsts.SUBSCRIBE_TYPE_PASSIVE
-          if do_unsolicited_passive else aconsts.SUBSCRIBE_TYPE_ACTIVE)
+            # start subscribe
+            s_disc_id, s_session_event = self.start_discovery_session(
+                s_dut, s_id, False, aconsts.SUBSCRIBE_TYPE_PASSIVE
+                if do_unsolicited_passive else aconsts.SUBSCRIBE_TYPE_ACTIVE)
 
-      # wait for discovery (allow for failures here since running lots of
-      # samples and would like to get the partial data even in the presence of
-      # errors)
-      try:
-        discovery_event = s_dut.ed.pop_event(
-            aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, timeout_period)
-        s_dut.log.info("[Subscriber] SESSION_CB_ON_SERVICE_DISCOVERED: %s",
-                       discovery_event["data"])
-      except queue.Empty:
-        s_dut.log.info("[Subscriber] Timed out while waiting for "
-                       "SESSION_CB_ON_SERVICE_DISCOVERED")
-        failed_discoveries = failed_discoveries + 1
-        continue
-      finally:
-        # destroy sessions
-        p_dut.droid.wifiAwareDestroyDiscoverySession(p_disc_id)
-        s_dut.droid.wifiAwareDestroyDiscoverySession(s_disc_id)
-        p_dut.droid.wifiAwareDestroy(p_id)
-        s_dut.droid.wifiAwareDestroy(s_id)
+            # wait for discovery (allow for failures here since running lots of
+            # samples and would like to get the partial data even in the presence of
+            # errors)
+            try:
+                discovery_event = s_dut.ed.pop_event(
+                    aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, timeout_period)
+                s_dut.log.info(
+                    "[Subscriber] SESSION_CB_ON_SERVICE_DISCOVERED: %s",
+                    discovery_event["data"])
+            except queue.Empty:
+                s_dut.log.info("[Subscriber] Timed out while waiting for "
+                               "SESSION_CB_ON_SERVICE_DISCOVERED")
+                failed_discoveries = failed_discoveries + 1
+                continue
+            finally:
+                # destroy sessions
+                p_dut.droid.wifiAwareDestroyDiscoverySession(p_disc_id)
+                s_dut.droid.wifiAwareDestroyDiscoverySession(s_disc_id)
+                p_dut.droid.wifiAwareDestroy(p_id)
+                s_dut.droid.wifiAwareDestroy(s_id)
 
-      # collect latency information
-      latencies.append(
-          discovery_event["data"][aconsts.SESSION_CB_KEY_TIMESTAMP_MS] -
-          s_session_event["data"][aconsts.SESSION_CB_KEY_TIMESTAMP_MS])
-      self.log.info("Latency #%d = %d" % (i, latencies[-1]))
+            # collect latency information
+            latencies.append(
+                discovery_event["data"][aconsts.SESSION_CB_KEY_TIMESTAMP_MS] -
+                s_session_event["data"][aconsts.SESSION_CB_KEY_TIMESTAMP_MS])
+            self.log.info("Latency #%d = %d" % (i, latencies[-1]))
 
-    autils.extract_stats(
-        s_dut,
-        data=latencies,
-        results=results[key],
-        key_prefix="",
-        log_prefix="Subscribe Session Sync/Discovery (%s, dw24=%d, dw5=%d)" %
-        ("Unsolicited/Passive"
-         if do_unsolicited_passive else "Solicited/Active", dw_24ghz, dw_5ghz))
-    results[key]["num_failed_discovery"] = failed_discoveries
+        autils.extract_stats(
+            s_dut,
+            data=latencies,
+            results=results[key],
+            key_prefix="",
+            log_prefix="Subscribe Session Sync/Discovery (%s, dw24=%d, dw5=%d)"
+            % ("Unsolicited/Passive" if do_unsolicited_passive else
+               "Solicited/Active", dw_24ghz, dw_5ghz))
+        results[key]["num_failed_discovery"] = failed_discoveries
 
-  def run_discovery_latency(self, results, do_unsolicited_passive, dw_24ghz,
-                            dw_5ghz, num_iterations):
-    """Run the service discovery latency test with the specified DW intervals.
+    def run_discovery_latency(self, results, do_unsolicited_passive, dw_24ghz,
+                              dw_5ghz, num_iterations):
+        """Run the service discovery latency test with the specified DW intervals.
 
     Args:
       results: Result array to be populated - will add results (not erase it)
@@ -162,274 +164,282 @@
       dw_24ghz: DW interval in the 2.4GHz band.
       dw_5ghz: DW interval in the 5GHz band.
     """
-    key = "%s_dw24_%d_dw5_%d" % (
-        "unsolicited_passive"
-        if do_unsolicited_passive else "solicited_active", dw_24ghz, dw_5ghz)
-    results[key] = {}
-    results[key]["num_iterations"] = num_iterations
+        key = "%s_dw24_%d_dw5_%d" % ("unsolicited_passive"
+                                     if do_unsolicited_passive else
+                                     "solicited_active", dw_24ghz, dw_5ghz)
+        results[key] = {}
+        results[key]["num_iterations"] = num_iterations
 
-    p_dut = self.android_devices[0]
-    p_dut.pretty_name = "Publisher"
-    s_dut = self.android_devices[1]
-    s_dut.pretty_name = "Subscriber"
+        p_dut = self.android_devices[0]
+        p_dut.pretty_name = "Publisher"
+        s_dut = self.android_devices[1]
+        s_dut.pretty_name = "Subscriber"
 
-    # override the default DW configuration
-    autils.config_power_settings(p_dut, dw_24ghz, dw_5ghz)
-    autils.config_power_settings(s_dut, dw_24ghz, dw_5ghz)
+        # override the default DW configuration
+        autils.config_power_settings(p_dut, dw_24ghz, dw_5ghz)
+        autils.config_power_settings(s_dut, dw_24ghz, dw_5ghz)
 
-    # Publisher+Subscriber: attach and wait for confirmation
-    p_id = p_dut.droid.wifiAwareAttach(False)
-    autils.wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
-    time.sleep(self.device_startup_offset)
-    s_id = s_dut.droid.wifiAwareAttach(False)
-    autils.wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
+        # Publisher+Subscriber: attach and wait for confirmation
+        p_id = p_dut.droid.wifiAwareAttach(False)
+        autils.wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
+        time.sleep(self.device_startup_offset)
+        s_id = s_dut.droid.wifiAwareAttach(False)
+        autils.wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
 
-    # start publish
-    p_disc_event = self.start_discovery_session(
-        p_dut, p_id, True, aconsts.PUBLISH_TYPE_UNSOLICITED
-        if do_unsolicited_passive else aconsts.PUBLISH_TYPE_SOLICITED)
+        # start publish
+        p_disc_event = self.start_discovery_session(
+            p_dut, p_id, True, aconsts.PUBLISH_TYPE_UNSOLICITED
+            if do_unsolicited_passive else aconsts.PUBLISH_TYPE_SOLICITED)
 
-    # wait for for devices to synchronize with each other - used so that first
-    # discovery isn't biased by synchronization.
-    time.sleep(self.WAIT_FOR_CLUSTER)
+        # wait for devices to synchronize with each other - used so that first
+        # discovery isn't biased by synchronization.
+        time.sleep(self.WAIT_FOR_CLUSTER)
 
-    # loop, perform discovery, and collect latency information
-    latencies = []
-    failed_discoveries = 0
-    for i in range(num_iterations):
-      # start subscribe
-      s_disc_id, s_session_event = self.start_discovery_session(
-          s_dut, s_id, False, aconsts.SUBSCRIBE_TYPE_PASSIVE
-          if do_unsolicited_passive else aconsts.SUBSCRIBE_TYPE_ACTIVE)
+        # loop, perform discovery, and collect latency information
+        latencies = []
+        failed_discoveries = 0
+        for i in range(num_iterations):
+            # start subscribe
+            s_disc_id, s_session_event = self.start_discovery_session(
+                s_dut, s_id, False, aconsts.SUBSCRIBE_TYPE_PASSIVE
+                if do_unsolicited_passive else aconsts.SUBSCRIBE_TYPE_ACTIVE)
 
-      # wait for discovery (allow for failures here since running lots of
-      # samples and would like to get the partial data even in the presence of
-      # errors)
-      try:
-        discovery_event = s_dut.ed.pop_event(
-            aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, autils.EVENT_TIMEOUT)
-      except queue.Empty:
-        s_dut.log.info("[Subscriber] Timed out while waiting for "
-                       "SESSION_CB_ON_SERVICE_DISCOVERED")
-        failed_discoveries = failed_discoveries + 1
-        continue
-      finally:
-        # destroy subscribe
-        s_dut.droid.wifiAwareDestroyDiscoverySession(s_disc_id)
+            # wait for discovery (allow for failures here since running lots of
+            # samples and would like to get the partial data even in the presence of
+            # errors)
+            try:
+                discovery_event = s_dut.ed.pop_event(
+                    aconsts.SESSION_CB_ON_SERVICE_DISCOVERED,
+                    autils.EVENT_TIMEOUT)
+            except queue.Empty:
+                s_dut.log.info("[Subscriber] Timed out while waiting for "
+                               "SESSION_CB_ON_SERVICE_DISCOVERED")
+                failed_discoveries = failed_discoveries + 1
+                continue
+            finally:
+                # destroy subscribe
+                s_dut.droid.wifiAwareDestroyDiscoverySession(s_disc_id)
 
-      # collect latency information
-      latencies.append(
-          discovery_event["data"][aconsts.SESSION_CB_KEY_TIMESTAMP_MS] -
-          s_session_event["data"][aconsts.SESSION_CB_KEY_TIMESTAMP_MS])
-      self.log.info("Latency #%d = %d" % (i, latencies[-1]))
+            # collect latency information
+            latencies.append(
+                discovery_event["data"][aconsts.SESSION_CB_KEY_TIMESTAMP_MS] -
+                s_session_event["data"][aconsts.SESSION_CB_KEY_TIMESTAMP_MS])
+            self.log.info("Latency #%d = %d" % (i, latencies[-1]))
 
-    autils.extract_stats(
-        s_dut,
-        data=latencies,
-        results=results[key],
-        key_prefix="",
-        log_prefix="Subscribe Session Discovery (%s, dw24=%d, dw5=%d)" %
-        ("Unsolicited/Passive"
-         if do_unsolicited_passive else "Solicited/Active", dw_24ghz, dw_5ghz))
-    results[key]["num_failed_discovery"] = failed_discoveries
+        autils.extract_stats(
+            s_dut,
+            data=latencies,
+            results=results[key],
+            key_prefix="",
+            log_prefix="Subscribe Session Discovery (%s, dw24=%d, dw5=%d)" %
+            ("Unsolicited/Passive" if do_unsolicited_passive else
+             "Solicited/Active", dw_24ghz, dw_5ghz))
+        results[key]["num_failed_discovery"] = failed_discoveries
 
-    # clean up
-    p_dut.droid.wifiAwareDestroyAll()
-    s_dut.droid.wifiAwareDestroyAll()
+        # clean up
+        p_dut.droid.wifiAwareDestroyAll()
+        s_dut.droid.wifiAwareDestroyAll()
 
-  def run_message_latency(self, results, dw_24ghz, dw_5ghz, num_iterations):
-    """Run the message tx latency test with the specified DW intervals.
+    def run_message_latency(self, results, dw_24ghz, dw_5ghz, num_iterations):
+        """Run the message tx latency test with the specified DW intervals.
 
     Args:
       results: Result array to be populated - will add results (not erase it)
       dw_24ghz: DW interval in the 2.4GHz band.
       dw_5ghz: DW interval in the 5GHz band.
     """
-    key = "dw24_%d_dw5_%d" % (dw_24ghz, dw_5ghz)
-    results[key] = {}
-    results[key]["num_iterations"] = num_iterations
+        key = "dw24_%d_dw5_%d" % (dw_24ghz, dw_5ghz)
+        results[key] = {}
+        results[key]["num_iterations"] = num_iterations
 
-    p_dut = self.android_devices[0]
-    s_dut = self.android_devices[1]
+        p_dut = self.android_devices[0]
+        s_dut = self.android_devices[1]
 
-    # override the default DW configuration
-    autils.config_power_settings(p_dut, dw_24ghz, dw_5ghz)
-    autils.config_power_settings(s_dut, dw_24ghz, dw_5ghz)
+        # override the default DW configuration
+        autils.config_power_settings(p_dut, dw_24ghz, dw_5ghz)
+        autils.config_power_settings(s_dut, dw_24ghz, dw_5ghz)
 
-    # Start up a discovery session
-    (p_id, s_id, p_disc_id, s_disc_id,
-     peer_id_on_sub) = autils.create_discovery_pair(
-         p_dut,
-         s_dut,
-         p_config=autils.create_discovery_config(
-             self.SERVICE_NAME, aconsts.PUBLISH_TYPE_UNSOLICITED),
-         s_config=autils.create_discovery_config(
-             self.SERVICE_NAME, aconsts.SUBSCRIBE_TYPE_PASSIVE),
-         device_startup_offset=self.device_startup_offset)
+        # Start up a discovery session
+        (p_id, s_id, p_disc_id, s_disc_id,
+         peer_id_on_sub) = autils.create_discovery_pair(
+             p_dut,
+             s_dut,
+             p_config=autils.create_discovery_config(
+                 self.SERVICE_NAME, aconsts.PUBLISH_TYPE_UNSOLICITED),
+             s_config=autils.create_discovery_config(
+                 self.SERVICE_NAME, aconsts.SUBSCRIBE_TYPE_PASSIVE),
+             device_startup_offset=self.device_startup_offset)
 
-    latencies = []
-    failed_tx = 0
-    messages_rx = 0
-    missing_rx = 0
-    corrupted_rx = 0
-    for i in range(num_iterations):
-      # send message
-      msg_s2p = "Message Subscriber -> Publisher #%d" % i
-      next_msg_id = self.get_next_msg_id()
-      s_dut.droid.wifiAwareSendMessage(s_disc_id, peer_id_on_sub, next_msg_id,
-                                       msg_s2p, 0)
+        latencies = []
+        failed_tx = 0
+        messages_rx = 0
+        missing_rx = 0
+        corrupted_rx = 0
+        for i in range(num_iterations):
+            # send message
+            msg_s2p = "Message Subscriber -> Publisher #%d" % i
+            next_msg_id = self.get_next_msg_id()
+            s_dut.droid.wifiAwareSendMessage(s_disc_id, peer_id_on_sub,
+                                             next_msg_id, msg_s2p, 0)
 
-      # wait for Tx confirmation
-      try:
-        sub_tx_msg_event = s_dut.ed.pop_event(
-            aconsts.SESSION_CB_ON_MESSAGE_SENT, 2 * autils.EVENT_TIMEOUT)
-        latencies.append(
-            sub_tx_msg_event["data"][aconsts.SESSION_CB_KEY_LATENCY_MS])
-      except queue.Empty:
-        s_dut.log.info("[Subscriber] Timed out while waiting for "
-                       "SESSION_CB_ON_MESSAGE_SENT")
-        failed_tx = failed_tx + 1
-        continue
+            # wait for Tx confirmation
+            try:
+                sub_tx_msg_event = s_dut.ed.pop_event(
+                    aconsts.SESSION_CB_ON_MESSAGE_SENT,
+                    2 * autils.EVENT_TIMEOUT)
+                latencies.append(sub_tx_msg_event["data"][
+                    aconsts.SESSION_CB_KEY_LATENCY_MS])
+            except queue.Empty:
+                s_dut.log.info("[Subscriber] Timed out while waiting for "
+                               "SESSION_CB_ON_MESSAGE_SENT")
+                failed_tx = failed_tx + 1
+                continue
 
-      # wait for Rx confirmation (and validate contents)
-      try:
-        pub_rx_msg_event = p_dut.ed.pop_event(
-            aconsts.SESSION_CB_ON_MESSAGE_RECEIVED, 2 * autils.EVENT_TIMEOUT)
-        messages_rx = messages_rx + 1
-        if (pub_rx_msg_event["data"][aconsts.SESSION_CB_KEY_MESSAGE_AS_STRING]
-            != msg_s2p):
-          corrupted_rx = corrupted_rx + 1
-      except queue.Empty:
-        s_dut.log.info("[Publisher] Timed out while waiting for "
-                       "SESSION_CB_ON_MESSAGE_RECEIVED")
-        missing_rx = missing_rx + 1
-        continue
+            # wait for Rx confirmation (and validate contents)
+            try:
+                pub_rx_msg_event = p_dut.ed.pop_event(
+                    aconsts.SESSION_CB_ON_MESSAGE_RECEIVED,
+                    2 * autils.EVENT_TIMEOUT)
+                messages_rx = messages_rx + 1
+                if (pub_rx_msg_event["data"]
+                    [aconsts.SESSION_CB_KEY_MESSAGE_AS_STRING] != msg_s2p):
+                    corrupted_rx = corrupted_rx + 1
+            except queue.Empty:
+                s_dut.log.info("[Publisher] Timed out while waiting for "
+                               "SESSION_CB_ON_MESSAGE_RECEIVED")
+                missing_rx = missing_rx + 1
+                continue
 
-    autils.extract_stats(
-        s_dut,
-        data=latencies,
-        results=results[key],
-        key_prefix="",
-        log_prefix="Subscribe Session Discovery (dw24=%d, dw5=%d)" %
-                   (dw_24ghz, dw_5ghz))
-    results[key]["failed_tx"] = failed_tx
-    results[key]["messages_rx"] = messages_rx
-    results[key]["missing_rx"] = missing_rx
-    results[key]["corrupted_rx"] = corrupted_rx
+        autils.extract_stats(
+            s_dut,
+            data=latencies,
+            results=results[key],
+            key_prefix="",
+            log_prefix="Subscribe Session Discovery (dw24=%d, dw5=%d)" %
+            (dw_24ghz, dw_5ghz))
+        results[key]["failed_tx"] = failed_tx
+        results[key]["messages_rx"] = messages_rx
+        results[key]["missing_rx"] = missing_rx
+        results[key]["corrupted_rx"] = corrupted_rx
 
-    # clean up
-    p_dut.droid.wifiAwareDestroyAll()
-    s_dut.droid.wifiAwareDestroyAll()
+        # clean up
+        p_dut.droid.wifiAwareDestroyAll()
+        s_dut.droid.wifiAwareDestroyAll()
 
-  def run_ndp_oob_latency(self, results, dw_24ghz, dw_5ghz, num_iterations):
-    """Runs the NDP setup with OOB (out-of-band) discovery latency test.
+    def run_ndp_oob_latency(self, results, dw_24ghz, dw_5ghz, num_iterations):
+        """Runs the NDP setup with OOB (out-of-band) discovery latency test.
 
     Args:
       results: Result array to be populated - will add results (not erase it)
       dw_24ghz: DW interval in the 2.4GHz band.
       dw_5ghz: DW interval in the 5GHz band.
     """
-    key_avail = "on_avail_dw24_%d_dw5_%d" % (dw_24ghz, dw_5ghz)
-    key_link_props = "link_props_dw24_%d_dw5_%d" % (dw_24ghz, dw_5ghz)
-    results[key_avail] = {}
-    results[key_link_props] = {}
-    results[key_avail]["num_iterations"] = num_iterations
+        key_avail = "on_avail_dw24_%d_dw5_%d" % (dw_24ghz, dw_5ghz)
+        key_link_props = "link_props_dw24_%d_dw5_%d" % (dw_24ghz, dw_5ghz)
+        results[key_avail] = {}
+        results[key_link_props] = {}
+        results[key_avail]["num_iterations"] = num_iterations
 
-    init_dut = self.android_devices[0]
-    init_dut.pretty_name = 'Initiator'
-    resp_dut = self.android_devices[1]
-    resp_dut.pretty_name = 'Responder'
+        init_dut = self.android_devices[0]
+        init_dut.pretty_name = 'Initiator'
+        resp_dut = self.android_devices[1]
+        resp_dut.pretty_name = 'Responder'
 
-    # override the default DW configuration
-    autils.config_power_settings(init_dut, dw_24ghz, dw_5ghz)
-    autils.config_power_settings(resp_dut, dw_24ghz, dw_5ghz)
+        # override the default DW configuration
+        autils.config_power_settings(init_dut, dw_24ghz, dw_5ghz)
+        autils.config_power_settings(resp_dut, dw_24ghz, dw_5ghz)
 
-    # Initiator+Responder: attach and wait for confirmation & identity
-    init_id = init_dut.droid.wifiAwareAttach(True)
-    autils.wait_for_event(init_dut, aconsts.EVENT_CB_ON_ATTACHED)
-    init_ident_event = autils.wait_for_event(init_dut,
-                                      aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
-    init_mac = init_ident_event['data']['mac']
-    time.sleep(self.device_startup_offset)
-    resp_id = resp_dut.droid.wifiAwareAttach(True)
-    autils.wait_for_event(resp_dut, aconsts.EVENT_CB_ON_ATTACHED)
-    resp_ident_event = autils.wait_for_event(resp_dut,
-                                      aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
-    resp_mac = resp_ident_event['data']['mac']
+        # Initiator+Responder: attach and wait for confirmation & identity
+        init_id = init_dut.droid.wifiAwareAttach(True)
+        autils.wait_for_event(init_dut, aconsts.EVENT_CB_ON_ATTACHED)
+        init_ident_event = autils.wait_for_event(
+            init_dut, aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
+        init_mac = init_ident_event['data']['mac']
+        time.sleep(self.device_startup_offset)
+        resp_id = resp_dut.droid.wifiAwareAttach(True)
+        autils.wait_for_event(resp_dut, aconsts.EVENT_CB_ON_ATTACHED)
+        resp_ident_event = autils.wait_for_event(
+            resp_dut, aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
+        resp_mac = resp_ident_event['data']['mac']
 
-    # wait for for devices to synchronize with each other - there are no other
-    # mechanisms to make sure this happens for OOB discovery (except retrying
-    # to execute the data-path request)
-    time.sleep(autils.WAIT_FOR_CLUSTER)
+        # wait for devices to synchronize with each other - there are no other
+        # mechanisms to make sure this happens for OOB discovery (except retrying
+        # to execute the data-path request)
+        time.sleep(autils.WAIT_FOR_CLUSTER)
 
-    on_available_latencies = []
-    link_props_latencies = []
-    ndp_setup_failures = 0
-    for i in range(num_iterations):
-      # Responder: request network
-      resp_req_key = autils.request_network(
-          resp_dut,
-          resp_dut.droid.wifiAwareCreateNetworkSpecifierOob(
-              resp_id, aconsts.DATA_PATH_RESPONDER, init_mac, None))
+        on_available_latencies = []
+        link_props_latencies = []
+        ndp_setup_failures = 0
+        for i in range(num_iterations):
+            # Responder: request network
+            resp_req_key = autils.request_network(
+                resp_dut,
+                resp_dut.droid.wifiAwareCreateNetworkSpecifierOob(
+                    resp_id, aconsts.DATA_PATH_RESPONDER, init_mac, None))
 
-      # Initiator: request network
-      init_req_key = autils.request_network(
-          init_dut,
-          init_dut.droid.wifiAwareCreateNetworkSpecifierOob(
-              init_id, aconsts.DATA_PATH_INITIATOR, resp_mac, None))
+            # Initiator: request network
+            init_req_key = autils.request_network(
+                init_dut,
+                init_dut.droid.wifiAwareCreateNetworkSpecifierOob(
+                    init_id, aconsts.DATA_PATH_INITIATOR, resp_mac, None))
 
-      # Initiator & Responder: wait for network formation
-      got_on_available = False
-      got_on_link_props = False
-      while not got_on_available or not got_on_link_props:
-        try:
-          nc_event = init_dut.ed.pop_event(cconsts.EVENT_NETWORK_CALLBACK,
-                                           autils.EVENT_NDP_TIMEOUT)
-          if nc_event["data"][
-              cconsts.NETWORK_CB_KEY_EVENT] == cconsts.NETWORK_CB_AVAILABLE:
-            got_on_available = True
-            on_available_latencies.append(
-                nc_event["data"][cconsts.NETWORK_CB_KEY_CURRENT_TS] -
-                nc_event["data"][cconsts.NETWORK_CB_KEY_CREATE_TS])
-          elif (nc_event["data"][cconsts.NETWORK_CB_KEY_EVENT] ==
-                cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED):
-            got_on_link_props = True
-            link_props_latencies.append(
-                nc_event["data"][cconsts.NETWORK_CB_KEY_CURRENT_TS] -
-                nc_event["data"][cconsts.NETWORK_CB_KEY_CREATE_TS])
-        except queue.Empty:
-          ndp_setup_failures = ndp_setup_failures + 1
-          init_dut.log.info("[Initiator] Timed out while waiting for "
-                         "EVENT_NETWORK_CALLBACK")
-          break
+            # Initiator & Responder: wait for network formation
+            got_on_available = False
+            got_on_link_props = False
+            while not got_on_available or not got_on_link_props:
+                try:
+                    nc_event = init_dut.ed.pop_event(
+                        cconsts.EVENT_NETWORK_CALLBACK,
+                        autils.EVENT_NDP_TIMEOUT)
+                    if nc_event["data"][
+                            cconsts.
+                            NETWORK_CB_KEY_EVENT] == cconsts.NETWORK_CB_AVAILABLE:
+                        got_on_available = True
+                        on_available_latencies.append(
+                            nc_event["data"][cconsts.NETWORK_CB_KEY_CURRENT_TS]
+                            -
+                            nc_event["data"][cconsts.NETWORK_CB_KEY_CREATE_TS])
+                    elif (nc_event["data"][cconsts.NETWORK_CB_KEY_EVENT] ==
+                          cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED):
+                        got_on_link_props = True
+                        link_props_latencies.append(
+                            nc_event["data"][cconsts.NETWORK_CB_KEY_CURRENT_TS]
+                            -
+                            nc_event["data"][cconsts.NETWORK_CB_KEY_CREATE_TS])
+                except queue.Empty:
+                    ndp_setup_failures = ndp_setup_failures + 1
+                    init_dut.log.info(
+                        "[Initiator] Timed out while waiting for "
+                        "EVENT_NETWORK_CALLBACK")
+                    break
 
-      # clean-up
-      init_dut.droid.connectivityUnregisterNetworkCallback(init_req_key)
-      resp_dut.droid.connectivityUnregisterNetworkCallback(resp_req_key)
+            # clean-up
+            init_dut.droid.connectivityUnregisterNetworkCallback(init_req_key)
+            resp_dut.droid.connectivityUnregisterNetworkCallback(resp_req_key)
 
-      # wait to make sure previous NDP terminated, otherwise its termination
-      # time will be counted in the setup latency!
-      time.sleep(2)
+            # wait to make sure previous NDP terminated, otherwise its termination
+            # time will be counted in the setup latency!
+            time.sleep(2)
 
-    autils.extract_stats(
-        init_dut,
-        data=on_available_latencies,
-        results=results[key_avail],
-        key_prefix="",
-        log_prefix="NDP setup OnAvailable(dw24=%d, dw5=%d)" % (dw_24ghz,
-                                                               dw_5ghz))
-    autils.extract_stats(
-        init_dut,
-        data=link_props_latencies,
-        results=results[key_link_props],
-        key_prefix="",
-        log_prefix="NDP setup OnLinkProperties (dw24=%d, dw5=%d)" % (dw_24ghz,
-                                                                     dw_5ghz))
-    results[key_avail]["ndp_setup_failures"] = ndp_setup_failures
+        autils.extract_stats(
+            init_dut,
+            data=on_available_latencies,
+            results=results[key_avail],
+            key_prefix="",
+            log_prefix="NDP setup OnAvailable(dw24=%d, dw5=%d)" % (dw_24ghz,
+                                                                   dw_5ghz))
+        autils.extract_stats(
+            init_dut,
+            data=link_props_latencies,
+            results=results[key_link_props],
+            key_prefix="",
+            log_prefix="NDP setup OnLinkProperties (dw24=%d, dw5=%d)" %
+            (dw_24ghz, dw_5ghz))
+        results[key_avail]["ndp_setup_failures"] = ndp_setup_failures
 
-  def run_end_to_end_latency(self, results, dw_24ghz, dw_5ghz, num_iterations,
-      startup_offset, include_setup):
-    """Measure the latency for end-to-end communication link setup:
+    def run_end_to_end_latency(self, results, dw_24ghz, dw_5ghz,
+                               num_iterations, startup_offset, include_setup):
+        """Measure the latency for end-to-end communication link setup:
     - Start Aware
     - Discovery
     - Message from Sub -> Pub
@@ -444,341 +454,359 @@
       include_setup: True to include the cluster setup in the latency
                     measurements.
     """
-    key = "dw24_%d_dw5_%d" % (dw_24ghz, dw_5ghz)
-    results[key] = {}
-    results[key]["num_iterations"] = num_iterations
+        key = "dw24_%d_dw5_%d" % (dw_24ghz, dw_5ghz)
+        results[key] = {}
+        results[key]["num_iterations"] = num_iterations
 
-    p_dut = self.android_devices[0]
-    p_dut.pretty_name = "Publisher"
-    s_dut = self.android_devices[1]
-    s_dut.pretty_name = "Subscriber"
+        p_dut = self.android_devices[0]
+        p_dut.pretty_name = "Publisher"
+        s_dut = self.android_devices[1]
+        s_dut.pretty_name = "Subscriber"
 
-    # override the default DW configuration
-    autils.config_power_settings(p_dut, dw_24ghz, dw_5ghz)
-    autils.config_power_settings(s_dut, dw_24ghz, dw_5ghz)
+        # override the default DW configuration
+        autils.config_power_settings(p_dut, dw_24ghz, dw_5ghz)
+        autils.config_power_settings(s_dut, dw_24ghz, dw_5ghz)
 
-    latencies = []
+        latencies = []
 
-    # allow for failures here since running lots of samples and would like to
-    # get the partial data even in the presence of errors
-    failures = 0
+        # allow for failures here since running lots of samples and would like to
+        # get the partial data even in the presence of errors
+        failures = 0
 
-    if not include_setup:
-      # Publisher+Subscriber: attach and wait for confirmation
-      p_id = p_dut.droid.wifiAwareAttach(False)
-      autils.wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
-      time.sleep(startup_offset)
-      s_id = s_dut.droid.wifiAwareAttach(False)
-      autils.wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
+        if not include_setup:
+            # Publisher+Subscriber: attach and wait for confirmation
+            p_id = p_dut.droid.wifiAwareAttach(False)
+            autils.wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
+            time.sleep(startup_offset)
+            s_id = s_dut.droid.wifiAwareAttach(False)
+            autils.wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
 
-    for i in range(num_iterations):
-      while (True): # for pseudo-goto/finalize
-        timestamp_start = time.perf_counter()
+        for i in range(num_iterations):
+            while (True):  # for pseudo-goto/finalize
+                timestamp_start = time.perf_counter()
 
-        if include_setup:
-          # Publisher+Subscriber: attach and wait for confirmation
-          p_id = p_dut.droid.wifiAwareAttach(False)
-          autils.wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
-          time.sleep(startup_offset)
-          s_id = s_dut.droid.wifiAwareAttach(False)
-          autils.wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
+                if include_setup:
+                    # Publisher+Subscriber: attach and wait for confirmation
+                    p_id = p_dut.droid.wifiAwareAttach(False)
+                    autils.wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
+                    time.sleep(startup_offset)
+                    s_id = s_dut.droid.wifiAwareAttach(False)
+                    autils.wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
 
-        # start publish
-        p_disc_id, p_disc_event = self.start_discovery_session(
-            p_dut, p_id, True, aconsts.PUBLISH_TYPE_UNSOLICITED)
+                # start publish
+                p_disc_id, p_disc_event = self.start_discovery_session(
+                    p_dut, p_id, True, aconsts.PUBLISH_TYPE_UNSOLICITED)
 
-        # start subscribe
-        s_disc_id, s_session_event = self.start_discovery_session(
-            s_dut, s_id, False, aconsts.SUBSCRIBE_TYPE_PASSIVE)
+                # start subscribe
+                s_disc_id, s_session_event = self.start_discovery_session(
+                    s_dut, s_id, False, aconsts.SUBSCRIBE_TYPE_PASSIVE)
 
-        # wait for discovery (allow for failures here since running lots of
-        # samples and would like to get the partial data even in the presence of
-        # errors)
-        try:
-          event = s_dut.ed.pop_event(aconsts.SESSION_CB_ON_SERVICE_DISCOVERED,
-                                     autils.EVENT_TIMEOUT)
-          s_dut.log.info("[Subscriber] SESSION_CB_ON_SERVICE_DISCOVERED: %s",
-                         event["data"])
-          peer_id_on_sub = event['data'][aconsts.SESSION_CB_KEY_PEER_ID]
-        except queue.Empty:
-          s_dut.log.info("[Subscriber] Timed out while waiting for "
-                         "SESSION_CB_ON_SERVICE_DISCOVERED")
-          failures = failures + 1
-          break
+                # wait for discovery (allow for failures here since running lots of
+                # samples and would like to get the partial data even in the presence of
+                # errors)
+                try:
+                    event = s_dut.ed.pop_event(
+                        aconsts.SESSION_CB_ON_SERVICE_DISCOVERED,
+                        autils.EVENT_TIMEOUT)
+                    s_dut.log.info(
+                        "[Subscriber] SESSION_CB_ON_SERVICE_DISCOVERED: %s",
+                        event["data"])
+                    peer_id_on_sub = event['data'][
+                        aconsts.SESSION_CB_KEY_PEER_ID]
+                except queue.Empty:
+                    s_dut.log.info("[Subscriber] Timed out while waiting for "
+                                   "SESSION_CB_ON_SERVICE_DISCOVERED")
+                    failures = failures + 1
+                    break
 
-        # message from Sub -> Pub
-        msg_s2p = "Message Subscriber -> Publisher #%d" % i
-        next_msg_id = self.get_next_msg_id()
-        s_dut.droid.wifiAwareSendMessage(s_disc_id, peer_id_on_sub, next_msg_id,
-                                         msg_s2p, 0)
+                # message from Sub -> Pub
+                msg_s2p = "Message Subscriber -> Publisher #%d" % i
+                next_msg_id = self.get_next_msg_id()
+                s_dut.droid.wifiAwareSendMessage(s_disc_id, peer_id_on_sub,
+                                                 next_msg_id, msg_s2p, 0)
 
-        # wait for Tx confirmation
-        try:
-          s_dut.ed.pop_event(aconsts.SESSION_CB_ON_MESSAGE_SENT,
-                             autils.EVENT_TIMEOUT)
-        except queue.Empty:
-          s_dut.log.info("[Subscriber] Timed out while waiting for "
-                         "SESSION_CB_ON_MESSAGE_SENT")
-          failures = failures + 1
-          break
+                # wait for Tx confirmation
+                try:
+                    s_dut.ed.pop_event(aconsts.SESSION_CB_ON_MESSAGE_SENT,
+                                       autils.EVENT_TIMEOUT)
+                except queue.Empty:
+                    s_dut.log.info("[Subscriber] Timed out while waiting for "
+                                   "SESSION_CB_ON_MESSAGE_SENT")
+                    failures = failures + 1
+                    break
 
-        # wait for Rx confirmation (and validate contents)
-        try:
-          event = p_dut.ed.pop_event(aconsts.SESSION_CB_ON_MESSAGE_RECEIVED,
-                                     autils.EVENT_TIMEOUT)
-          peer_id_on_pub = event['data'][aconsts.SESSION_CB_KEY_PEER_ID]
-          if (event["data"][
-            aconsts.SESSION_CB_KEY_MESSAGE_AS_STRING] != msg_s2p):
-            p_dut.log.info("[Publisher] Corrupted input message - %s", event)
-            failures = failures + 1
-            break
-        except queue.Empty:
-          p_dut.log.info("[Publisher] Timed out while waiting for "
-                         "SESSION_CB_ON_MESSAGE_RECEIVED")
-          failures = failures + 1
-          break
+                # wait for Rx confirmation (and validate contents)
+                try:
+                    event = p_dut.ed.pop_event(
+                        aconsts.SESSION_CB_ON_MESSAGE_RECEIVED,
+                        autils.EVENT_TIMEOUT)
+                    peer_id_on_pub = event['data'][
+                        aconsts.SESSION_CB_KEY_PEER_ID]
+                    if (event["data"][aconsts.SESSION_CB_KEY_MESSAGE_AS_STRING]
+                            != msg_s2p):
+                        p_dut.log.info(
+                            "[Publisher] Corrupted input message - %s", event)
+                        failures = failures + 1
+                        break
+                except queue.Empty:
+                    p_dut.log.info("[Publisher] Timed out while waiting for "
+                                   "SESSION_CB_ON_MESSAGE_RECEIVED")
+                    failures = failures + 1
+                    break
 
-        # message from Pub -> Sub
-        msg_p2s = "Message Publisher -> Subscriber #%d" % i
-        next_msg_id = self.get_next_msg_id()
-        p_dut.droid.wifiAwareSendMessage(p_disc_id, peer_id_on_pub, next_msg_id,
-                                         msg_p2s, 0)
+                # message from Pub -> Sub
+                msg_p2s = "Message Publisher -> Subscriber #%d" % i
+                next_msg_id = self.get_next_msg_id()
+                p_dut.droid.wifiAwareSendMessage(p_disc_id, peer_id_on_pub,
+                                                 next_msg_id, msg_p2s, 0)
 
-        # wait for Tx confirmation
-        try:
-          p_dut.ed.pop_event(aconsts.SESSION_CB_ON_MESSAGE_SENT,
-                             autils.EVENT_TIMEOUT)
-        except queue.Empty:
-          p_dut.log.info("[Publisher] Timed out while waiting for "
-                         "SESSION_CB_ON_MESSAGE_SENT")
-          failures = failures + 1
-          break
+                # wait for Tx confirmation
+                try:
+                    p_dut.ed.pop_event(aconsts.SESSION_CB_ON_MESSAGE_SENT,
+                                       autils.EVENT_TIMEOUT)
+                except queue.Empty:
+                    p_dut.log.info("[Publisher] Timed out while waiting for "
+                                   "SESSION_CB_ON_MESSAGE_SENT")
+                    failures = failures + 1
+                    break
 
-        # wait for Rx confirmation (and validate contents)
-        try:
-          event = s_dut.ed.pop_event(aconsts.SESSION_CB_ON_MESSAGE_RECEIVED,
-                                     autils.EVENT_TIMEOUT)
-          if (event["data"][
-            aconsts.SESSION_CB_KEY_MESSAGE_AS_STRING] != msg_p2s):
-            s_dut.log.info("[Subscriber] Corrupted input message - %s", event)
-            failures = failures + 1
-            break
-        except queue.Empty:
-          s_dut.log.info("[Subscriber] Timed out while waiting for "
-                         "SESSION_CB_ON_MESSAGE_RECEIVED")
-          failures = failures + 1
-          break
+                # wait for Rx confirmation (and validate contents)
+                try:
+                    event = s_dut.ed.pop_event(
+                        aconsts.SESSION_CB_ON_MESSAGE_RECEIVED,
+                        autils.EVENT_TIMEOUT)
+                    if (event["data"][aconsts.SESSION_CB_KEY_MESSAGE_AS_STRING]
+                            != msg_p2s):
+                        s_dut.log.info(
+                            "[Subscriber] Corrupted input message - %s", event)
+                        failures = failures + 1
+                        break
+                except queue.Empty:
+                    s_dut.log.info("[Subscriber] Timed out while waiting for "
+                                   "SESSION_CB_ON_MESSAGE_RECEIVED")
+                    failures = failures + 1
+                    break
 
-        # create NDP
+                # create NDP
 
-        # Publisher: request network
-        p_req_key = autils.request_network(
+                # Publisher: request network
+                p_req_key = autils.request_network(
+                    p_dut,
+                    p_dut.droid.wifiAwareCreateNetworkSpecifier(
+                        p_disc_id, peer_id_on_pub, None))
+
+                # Subscriber: request network
+                s_req_key = autils.request_network(
+                    s_dut,
+                    s_dut.droid.wifiAwareCreateNetworkSpecifier(
+                        s_disc_id, peer_id_on_sub, None))
+
+                # Publisher & Subscriber: wait for network formation
+                try:
+                    p_net_event = autils.wait_for_event_with_keys(
+                        p_dut, cconsts.EVENT_NETWORK_CALLBACK,
+                        autils.EVENT_TIMEOUT,
+                        (cconsts.NETWORK_CB_KEY_EVENT,
+                         cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+                        (cconsts.NETWORK_CB_KEY_ID, p_req_key))
+                    s_net_event = autils.wait_for_event_with_keys(
+                        s_dut, cconsts.EVENT_NETWORK_CALLBACK,
+                        autils.EVENT_TIMEOUT,
+                        (cconsts.NETWORK_CB_KEY_EVENT,
+                         cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+                        (cconsts.NETWORK_CB_KEY_ID, s_req_key))
+                except:
+                    failures = failures + 1
+                    break
+
+                p_aware_if = p_net_event["data"][
+                    cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+                s_aware_if = s_net_event["data"][
+                    cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+
+                p_ipv6 = \
+                p_dut.droid.connectivityGetLinkLocalIpv6Address(p_aware_if).split("%")[
+                  0]
+                s_ipv6 = \
+                s_dut.droid.connectivityGetLinkLocalIpv6Address(s_aware_if).split("%")[
+                  0]
+
+                p_dut.log.info("[Publisher] IF=%s, IPv6=%s", p_aware_if,
+                               p_ipv6)
+                s_dut.log.info("[Subscriber] IF=%s, IPv6=%s", s_aware_if,
+                               s_ipv6)
+
+                latencies.append(time.perf_counter() - timestamp_start)
+                break
+
+            # destroy sessions
+            p_dut.droid.wifiAwareDestroyDiscoverySession(p_disc_id)
+            s_dut.droid.wifiAwareDestroyDiscoverySession(s_disc_id)
+            if include_setup:
+                p_dut.droid.wifiAwareDestroy(p_id)
+                s_dut.droid.wifiAwareDestroy(s_id)
+
+        autils.extract_stats(
             p_dut,
-            p_dut.droid.wifiAwareCreateNetworkSpecifier(p_disc_id,
-                                                        peer_id_on_pub, None))
+            data=latencies,
+            results=results[key],
+            key_prefix="",
+            log_prefix="End-to-End(dw24=%d, dw5=%d)" % (dw_24ghz, dw_5ghz))
+        results[key]["failures"] = failures
 
-        # Subscriber: request network
-        s_req_key = autils.request_network(
-            s_dut,
-            s_dut.droid.wifiAwareCreateNetworkSpecifier(s_disc_id,
-                                                        peer_id_on_sub, None))
+    ########################################################################
 
-        # Publisher & Subscriber: wait for network formation
-        try:
-          p_net_event = autils.wait_for_event_with_keys(
-              p_dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_TIMEOUT, (
-              cconsts.NETWORK_CB_KEY_EVENT,
-              cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-              (cconsts.NETWORK_CB_KEY_ID, p_req_key))
-          s_net_event = autils.wait_for_event_with_keys(
-              s_dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_TIMEOUT, (
-              cconsts.NETWORK_CB_KEY_EVENT,
-              cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-              (cconsts.NETWORK_CB_KEY_ID, s_req_key))
-        except:
-          failures = failures + 1
-          break
-
-        p_aware_if = p_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
-        s_aware_if = s_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
-
-        p_ipv6 = \
-        p_dut.droid.connectivityGetLinkLocalIpv6Address(p_aware_if).split("%")[
-          0]
-        s_ipv6 = \
-        s_dut.droid.connectivityGetLinkLocalIpv6Address(s_aware_if).split("%")[
-          0]
-
-        p_dut.log.info("[Publisher] IF=%s, IPv6=%s", p_aware_if, p_ipv6)
-        s_dut.log.info("[Subscriber] IF=%s, IPv6=%s", s_aware_if, s_ipv6)
-
-        latencies.append(time.perf_counter() - timestamp_start)
-        break
-
-      # destroy sessions
-      p_dut.droid.wifiAwareDestroyDiscoverySession(p_disc_id)
-      s_dut.droid.wifiAwareDestroyDiscoverySession(s_disc_id)
-      if include_setup:
-        p_dut.droid.wifiAwareDestroy(p_id)
-        s_dut.droid.wifiAwareDestroy(s_id)
-
-    autils.extract_stats(
-        p_dut,
-        data=latencies,
-        results=results[key],
-        key_prefix="",
-        log_prefix="End-to-End(dw24=%d, dw5=%d)" % (dw_24ghz, dw_5ghz))
-    results[key]["failures"] = failures
-
-
-  ########################################################################
-
-  def test_synchronization_default_dws(self):
-    """Measure the device synchronization for default dws. Loop over values
+    def test_synchronization_default_dws(self):
+        """Measure the device synchronization for default dws. Loop over values
     from 0 to 4 seconds."""
-    results = {}
-    for startup_offset in range(5):
-      self.run_synchronization_latency(
-          results=results,
-          do_unsolicited_passive=True,
-          dw_24ghz=aconsts.POWER_DW_24_INTERACTIVE,
-          dw_5ghz=aconsts.POWER_DW_5_INTERACTIVE,
-          num_iterations=10,
-          startup_offset=startup_offset,
-          timeout_period=20)
-    asserts.explicit_pass(
-        "test_synchronization_default_dws finished", extras=results)
+        results = {}
+        for startup_offset in range(5):
+            self.run_synchronization_latency(
+                results=results,
+                do_unsolicited_passive=True,
+                dw_24ghz=aconsts.POWER_DW_24_INTERACTIVE,
+                dw_5ghz=aconsts.POWER_DW_5_INTERACTIVE,
+                num_iterations=10,
+                startup_offset=startup_offset,
+                timeout_period=20)
+        asserts.explicit_pass(
+            "test_synchronization_default_dws finished", extras=results)
 
-  def test_synchronization_non_interactive_dws(self):
-    """Measure the device synchronization for non-interactive dws. Loop over
+    def test_synchronization_non_interactive_dws(self):
+        """Measure the device synchronization for non-interactive dws. Loop over
     values from 0 to 4 seconds."""
-    results = {}
-    for startup_offset in range(5):
-      self.run_synchronization_latency(
-          results=results,
-          do_unsolicited_passive=True,
-          dw_24ghz=aconsts.POWER_DW_24_NON_INTERACTIVE,
-          dw_5ghz=aconsts.POWER_DW_5_NON_INTERACTIVE,
-          num_iterations=10,
-          startup_offset=startup_offset,
-          timeout_period=20)
-    asserts.explicit_pass(
-        "test_synchronization_non_interactive_dws finished", extras=results)
+        results = {}
+        for startup_offset in range(5):
+            self.run_synchronization_latency(
+                results=results,
+                do_unsolicited_passive=True,
+                dw_24ghz=aconsts.POWER_DW_24_NON_INTERACTIVE,
+                dw_5ghz=aconsts.POWER_DW_5_NON_INTERACTIVE,
+                num_iterations=10,
+                startup_offset=startup_offset,
+                timeout_period=20)
+        asserts.explicit_pass(
+            "test_synchronization_non_interactive_dws finished",
+            extras=results)
 
-  def test_discovery_latency_default_dws(self):
-    """Measure the service discovery latency with the default DW configuration.
+    def test_discovery_latency_default_dws(self):
+        """Measure the service discovery latency with the default DW configuration.
     """
-    results = {}
-    self.run_discovery_latency(
-        results=results,
-        do_unsolicited_passive=True,
-        dw_24ghz=aconsts.POWER_DW_24_INTERACTIVE,
-        dw_5ghz=aconsts.POWER_DW_5_INTERACTIVE,
-        num_iterations=100)
-    asserts.explicit_pass(
-        "test_discovery_latency_default_parameters finished", extras=results)
-
-  def test_discovery_latency_non_interactive_dws(self):
-    """Measure the service discovery latency with the DW configuration for non
-    -interactive mode (lower power)."""
-    results = {}
-    self.run_discovery_latency(
-        results=results,
-        do_unsolicited_passive=True,
-        dw_24ghz=aconsts.POWER_DW_24_NON_INTERACTIVE,
-        dw_5ghz=aconsts.POWER_DW_5_NON_INTERACTIVE,
-        num_iterations=100)
-    asserts.explicit_pass(
-        "test_discovery_latency_non_interactive_dws finished", extras=results)
-
-  def test_discovery_latency_all_dws(self):
-    """Measure the service discovery latency with all DW combinations (low
-    iteration count)"""
-    results = {}
-    for dw24 in range(1, 6):  # permitted values: 1-5
-      for dw5 in range(0, 6): # permitted values: 0, 1-5
+        results = {}
         self.run_discovery_latency(
             results=results,
             do_unsolicited_passive=True,
-            dw_24ghz=dw24,
-            dw_5ghz=dw5,
-            num_iterations=10)
-    asserts.explicit_pass(
-        "test_discovery_latency_all_dws finished", extras=results)
+            dw_24ghz=aconsts.POWER_DW_24_INTERACTIVE,
+            dw_5ghz=aconsts.POWER_DW_5_INTERACTIVE,
+            num_iterations=100)
+        asserts.explicit_pass(
+            "test_discovery_latency_default_parameters finished",
+            extras=results)
 
-  def test_message_latency_default_dws(self):
-    """Measure the send message latency with the default DW configuration. Test
+    def test_discovery_latency_non_interactive_dws(self):
+        """Measure the service discovery latency with the DW configuration for non
+    -interactive mode (lower power)."""
+        results = {}
+        self.run_discovery_latency(
+            results=results,
+            do_unsolicited_passive=True,
+            dw_24ghz=aconsts.POWER_DW_24_NON_INTERACTIVE,
+            dw_5ghz=aconsts.POWER_DW_5_NON_INTERACTIVE,
+            num_iterations=100)
+        asserts.explicit_pass(
+            "test_discovery_latency_non_interactive_dws finished",
+            extras=results)
+
+    def test_discovery_latency_all_dws(self):
+        """Measure the service discovery latency with all DW combinations (low
+    iteration count)"""
+        results = {}
+        for dw24 in range(1, 6):  # permitted values: 1-5
+            for dw5 in range(0, 6):  # permitted values: 0, 1-5
+                self.run_discovery_latency(
+                    results=results,
+                    do_unsolicited_passive=True,
+                    dw_24ghz=dw24,
+                    dw_5ghz=dw5,
+                    num_iterations=10)
+        asserts.explicit_pass(
+            "test_discovery_latency_all_dws finished", extras=results)
+
+    def test_message_latency_default_dws(self):
+        """Measure the send message latency with the default DW configuration. Test
     performed on non-queued message transmission - i.e. waiting for confirmation
     of reception (ACK) before sending the next message."""
-    results = {}
-    self.run_message_latency(
-        results=results,
-        dw_24ghz=aconsts.POWER_DW_24_INTERACTIVE,
-        dw_5ghz=aconsts.POWER_DW_5_INTERACTIVE,
-        num_iterations=100)
-    asserts.explicit_pass(
-        "test_message_latency_default_dws finished", extras=results)
+        results = {}
+        self.run_message_latency(
+            results=results,
+            dw_24ghz=aconsts.POWER_DW_24_INTERACTIVE,
+            dw_5ghz=aconsts.POWER_DW_5_INTERACTIVE,
+            num_iterations=100)
+        asserts.explicit_pass(
+            "test_message_latency_default_dws finished", extras=results)
 
-  def test_message_latency_non_interactive_dws(self):
-    """Measure the send message latency with the DW configuration for
+    def test_message_latency_non_interactive_dws(self):
+        """Measure the send message latency with the DW configuration for
     non-interactive mode. Test performed on non-queued message transmission -
     i.e. waiting for confirmation of reception (ACK) before sending the next
     message."""
-    results = {}
-    self.run_message_latency(
-        results=results,
-        dw_24ghz=aconsts.POWER_DW_24_NON_INTERACTIVE,
-        dw_5ghz=aconsts.POWER_DW_5_NON_INTERACTIVE,
-        num_iterations=100)
-    asserts.explicit_pass(
-        "test_message_latency_non_interactive_dws finished", extras=results)
+        results = {}
+        self.run_message_latency(
+            results=results,
+            dw_24ghz=aconsts.POWER_DW_24_NON_INTERACTIVE,
+            dw_5ghz=aconsts.POWER_DW_5_NON_INTERACTIVE,
+            num_iterations=100)
+        asserts.explicit_pass(
+            "test_message_latency_non_interactive_dws finished",
+            extras=results)
 
-  def test_oob_ndp_setup_latency_default_dws(self):
-    """Measure the NDP setup latency with the default DW configuration. The
+    def test_oob_ndp_setup_latency_default_dws(self):
+        """Measure the NDP setup latency with the default DW configuration. The
     NDP is setup with OOB (out-of-band) configuration."""
-    results = {}
-    self.run_ndp_oob_latency(
-        results=results,
-        dw_24ghz=aconsts.POWER_DW_24_INTERACTIVE,
-        dw_5ghz=aconsts.POWER_DW_5_INTERACTIVE,
-        num_iterations=100)
-    asserts.explicit_pass(
-        "test_ndp_setup_latency_default_dws finished", extras=results)
+        results = {}
+        self.run_ndp_oob_latency(
+            results=results,
+            dw_24ghz=aconsts.POWER_DW_24_INTERACTIVE,
+            dw_5ghz=aconsts.POWER_DW_5_INTERACTIVE,
+            num_iterations=100)
+        asserts.explicit_pass(
+            "test_ndp_setup_latency_default_dws finished", extras=results)
 
-  def test_oob_ndp_setup_latency_non_interactive_dws(self):
-    """Measure the NDP setup latency with the DW configuration for
+    def test_oob_ndp_setup_latency_non_interactive_dws(self):
+        """Measure the NDP setup latency with the DW configuration for
     non-interactive mode. The NDP is setup with OOB (out-of-band)
     configuration"""
-    results = {}
-    self.run_ndp_oob_latency(
-        results=results,
-        dw_24ghz=aconsts.POWER_DW_24_NON_INTERACTIVE,
-        dw_5ghz=aconsts.POWER_DW_5_NON_INTERACTIVE,
-        num_iterations=100)
-    asserts.explicit_pass(
-        "test_ndp_setup_latency_non_interactive_dws finished", extras=results)
+        results = {}
+        self.run_ndp_oob_latency(
+            results=results,
+            dw_24ghz=aconsts.POWER_DW_24_NON_INTERACTIVE,
+            dw_5ghz=aconsts.POWER_DW_5_NON_INTERACTIVE,
+            num_iterations=100)
+        asserts.explicit_pass(
+            "test_ndp_setup_latency_non_interactive_dws finished",
+            extras=results)
 
-  def test_end_to_end_latency_default_dws(self):
-    """Measure the latency for end-to-end communication link setup:
+    def test_end_to_end_latency_default_dws(self):
+        """Measure the latency for end-to-end communication link setup:
       - Start Aware
       - Discovery
       - Message from Sub -> Pub
       - Message from Pub -> Sub
       - NDP setup
     """
-    results = {}
-    self.run_end_to_end_latency(
-        results,
-        dw_24ghz=aconsts.POWER_DW_24_INTERACTIVE,
-        dw_5ghz=aconsts.POWER_DW_5_INTERACTIVE,
-        num_iterations=10,
-        startup_offset=0,
-        include_setup=True)
-    asserts.explicit_pass(
-        "test_end_to_end_latency_default_dws finished", extras=results)
+        results = {}
+        self.run_end_to_end_latency(
+            results,
+            dw_24ghz=aconsts.POWER_DW_24_INTERACTIVE,
+            dw_5ghz=aconsts.POWER_DW_5_INTERACTIVE,
+            num_iterations=10,
+            startup_offset=0,
+            include_setup=True)
+        asserts.explicit_pass(
+            "test_end_to_end_latency_default_dws finished", extras=results)
 
-  def test_end_to_end_latency_post_attach_default_dws(self):
-    """Measure the latency for end-to-end communication link setup without
+    def test_end_to_end_latency_post_attach_default_dws(self):
+        """Measure the latency for end-to-end communication link setup without
     the initial synchronization:
       - Start Aware & synchronize initially
       - Loop:
@@ -787,14 +815,14 @@
         - Message from Pub -> Sub
         - NDP setup
     """
-    results = {}
-    self.run_end_to_end_latency(
-        results,
-        dw_24ghz=aconsts.POWER_DW_24_INTERACTIVE,
-        dw_5ghz=aconsts.POWER_DW_5_INTERACTIVE,
-        num_iterations=10,
-        startup_offset=0,
-        include_setup=False)
-    asserts.explicit_pass(
-      "test_end_to_end_latency_post_attach_default_dws finished",
-      extras=results)
+        results = {}
+        self.run_end_to_end_latency(
+            results,
+            dw_24ghz=aconsts.POWER_DW_24_INTERACTIVE,
+            dw_5ghz=aconsts.POWER_DW_5_INTERACTIVE,
+            num_iterations=10,
+            startup_offset=0,
+            include_setup=False)
+        asserts.explicit_pass(
+            "test_end_to_end_latency_post_attach_default_dws finished",
+            extras=results)
diff --git a/acts/tests/google/wifi/aware/performance/ThroughputTest.py b/acts/tests/google/wifi/aware/performance/ThroughputTest.py
index ddb6d15..a5b1b8d 100644
--- a/acts/tests/google/wifi/aware/performance/ThroughputTest.py
+++ b/acts/tests/google/wifi/aware/performance/ThroughputTest.py
@@ -28,82 +28,84 @@
 
 
 class ThroughputTest(AwareBaseTest):
-  """Set of tests for Wi-Fi Aware to measure latency of Aware operations."""
+    """Set of tests for Wi-Fi Aware to measure latency of Aware operations."""
 
-  SERVICE_NAME = "GoogleTestServiceXYZ"
+    SERVICE_NAME = "GoogleTestServiceXYZ"
 
-  PASSPHRASE = "This is some random passphrase - very very secure!!"
-  PASSPHRASE2 = "This is some random passphrase - very very secure - but diff!!"
+    PASSPHRASE = "This is some random passphrase - very very secure!!"
+    PASSPHRASE2 = "This is some random passphrase - very very secure - but diff!!"
 
-  def __init__(self, controllers):
-    super(ThroughputTest, self).__init__(controllers)
+    def __init__(self, controllers):
+        super(ThroughputTest, self).__init__(controllers)
 
-  def request_network(self, dut, ns):
-    """Request a Wi-Fi Aware network.
+    def request_network(self, dut, ns):
+        """Request a Wi-Fi Aware network.
 
     Args:
       dut: Device
       ns: Network specifier
     Returns: the request key
     """
-    network_req = {"TransportType": 5, "NetworkSpecifier": ns}
-    return dut.droid.connectivityRequestWifiAwareNetwork(network_req)
+        network_req = {"TransportType": 5, "NetworkSpecifier": ns}
+        return dut.droid.connectivityRequestWifiAwareNetwork(network_req)
 
-  def run_iperf_single_ndp_aware_only(self, use_ib, results):
-    """Measure iperf performance on a single NDP, with Aware enabled and no
+    def run_iperf_single_ndp_aware_only(self, use_ib, results):
+        """Measure iperf performance on a single NDP, with Aware enabled and no
     infrastructure connection - i.e. device is not associated to an AP.
 
     Args:
       use_ib: True to use in-band discovery, False to use out-of-band discovery.
       results: Dictionary into which to place test results.
     """
-    init_dut = self.android_devices[0]
-    resp_dut = self.android_devices[1]
+        init_dut = self.android_devices[0]
+        resp_dut = self.android_devices[1]
 
-    if use_ib:
-      # note: Publisher = Responder, Subscribe = Initiator
-      (resp_req_key, init_req_key, resp_aware_if,
-       init_aware_if, resp_ipv6, init_ipv6) = autils.create_ib_ndp(
-           resp_dut, init_dut,
-           autils.create_discovery_config(self.SERVICE_NAME,
-                                          aconsts.PUBLISH_TYPE_UNSOLICITED),
-           autils.create_discovery_config(self.SERVICE_NAME,
-                                          aconsts.SUBSCRIBE_TYPE_PASSIVE),
-           self.device_startup_offset)
-    else:
-      (init_req_key, resp_req_key, init_aware_if, resp_aware_if, init_ipv6,
-      resp_ipv6) = autils.create_oob_ndp(init_dut, resp_dut)
-    self.log.info("Interface names: I=%s, R=%s", init_aware_if, resp_aware_if)
-    self.log.info("Interface addresses (IPv6): I=%s, R=%s", init_ipv6,
-                  resp_ipv6)
+        if use_ib:
+            # note: Publisher = Responder, Subscribe = Initiator
+            (resp_req_key, init_req_key, resp_aware_if, init_aware_if,
+             resp_ipv6, init_ipv6) = autils.create_ib_ndp(
+                 resp_dut, init_dut,
+                 autils.create_discovery_config(
+                     self.SERVICE_NAME, aconsts.PUBLISH_TYPE_UNSOLICITED),
+                 autils.create_discovery_config(
+                     self.SERVICE_NAME, aconsts.SUBSCRIBE_TYPE_PASSIVE),
+                 self.device_startup_offset)
+        else:
+            (init_req_key, resp_req_key, init_aware_if, resp_aware_if,
+             init_ipv6, resp_ipv6) = autils.create_oob_ndp(init_dut, resp_dut)
+        self.log.info("Interface names: I=%s, R=%s", init_aware_if,
+                      resp_aware_if)
+        self.log.info("Interface addresses (IPv6): I=%s, R=%s", init_ipv6,
+                      resp_ipv6)
 
-    # Run iperf3
-    result, data = init_dut.run_iperf_server("-D")
-    asserts.assert_true(result, "Can't start iperf3 server")
+        # Run iperf3
+        result, data = init_dut.run_iperf_server("-D")
+        asserts.assert_true(result, "Can't start iperf3 server")
 
-    result, data = resp_dut.run_iperf_client(
-        "%s%%%s" % (init_ipv6, resp_aware_if), "-6 -J")
-    self.log.debug(data)
-    asserts.assert_true(result,
-                        "Failure starting/running iperf3 in client mode")
-    self.log.debug(pprint.pformat(data))
+        result, data = resp_dut.run_iperf_client(
+            "%s%%%s" % (init_ipv6, resp_aware_if), "-6 -J")
+        self.log.debug(data)
+        asserts.assert_true(result,
+                            "Failure starting/running iperf3 in client mode")
+        self.log.debug(pprint.pformat(data))
 
-    # clean-up
-    resp_dut.droid.connectivityUnregisterNetworkCallback(resp_req_key)
-    init_dut.droid.connectivityUnregisterNetworkCallback(init_req_key)
+        # clean-up
+        resp_dut.droid.connectivityUnregisterNetworkCallback(resp_req_key)
+        init_dut.droid.connectivityUnregisterNetworkCallback(init_req_key)
 
-    # Collect results
-    data_json = json.loads("".join(data))
-    if "error" in data_json:
-      asserts.fail(
-          "iperf run failed: %s" % data_json["error"], extras=data_json)
-    results["tx_rate"] = data_json["end"]["sum_sent"]["bits_per_second"]
-    results["rx_rate"] = data_json["end"]["sum_received"]["bits_per_second"]
-    self.log.info("iPerf3: Sent = %d bps Received = %d bps", results["tx_rate"],
-                  results["rx_rate"])
+        # Collect results
+        data_json = json.loads("".join(data))
+        if "error" in data_json:
+            asserts.fail(
+                "iperf run failed: %s" % data_json["error"], extras=data_json)
+        results["tx_rate"] = data_json["end"]["sum_sent"]["bits_per_second"]
+        results["rx_rate"] = data_json["end"]["sum_received"][
+            "bits_per_second"]
+        self.log.info("iPerf3: Sent = %d bps Received = %d bps",
+                      results["tx_rate"], results["rx_rate"])
 
-  def run_iperf(self, q, dut, peer_dut, peer_aware_if, dut_ipv6, port):
-    """Runs iperf and places results in the queue.
+    def run_iperf(self, q, dut, peer_dut, peer_aware_if, dut_ipv6, port):
+        """Runs iperf and places results in the queue.
 
     Args:
       q: The queue into which to place the results
@@ -113,16 +115,16 @@
       dut_ipv6: The IPv6 address of the server.
       port: The port to use for the server and client.
     """
-    result, data = dut.run_iperf_server("-D -p %d" % port)
-    asserts.assert_true(result, "Can't start iperf3 server")
+        result, data = dut.run_iperf_server("-D -p %d" % port)
+        asserts.assert_true(result, "Can't start iperf3 server")
 
-    result, data = peer_dut.run_iperf_client(
-        "%s%%%s" % (dut_ipv6, peer_aware_if), "-6 -J -p %d" % port)
-    self.log.debug(data)
-    q.put((result, data))
+        result, data = peer_dut.run_iperf_client(
+            "%s%%%s" % (dut_ipv6, peer_aware_if), "-6 -J -p %d" % port)
+        self.log.debug(data)
+        q.put((result, data))
 
-  def run_iperf_max_ndp_aware_only(self, results):
-    """Measure iperf performance on the max number of concurrent OOB NDPs, with
+    def run_iperf_max_ndp_aware_only(self, results):
+        """Measure iperf performance on the max number of concurrent OOB NDPs, with
     Aware enabled and no infrastructure connection - i.e. device is not
     associated to an AP.
 
@@ -132,117 +134,123 @@
     Args:
       results: Dictionary into which to place test results.
     """
-    dut = self.android_devices[0]
+        dut = self.android_devices[0]
 
-    # get max NDP: using first available device (assumes all devices are the
-    # same)
-    max_ndp = dut.aware_capabilities[aconsts.CAP_MAX_NDP_SESSIONS]
-    asserts.assert_true(len(self.android_devices) > max_ndp,
-                        'Needed %d devices to run the test, have %d' %
-                        (max_ndp + 1, len(self.android_devices)))
+        # get max NDP: using first available device (assumes all devices are the
+        # same)
+        max_ndp = dut.aware_capabilities[aconsts.CAP_MAX_NDP_SESSIONS]
+        asserts.assert_true(
+            len(self.android_devices) > max_ndp,
+            'Needed %d devices to run the test, have %d' %
+            (max_ndp + 1, len(self.android_devices)))
 
-    # create all NDPs
-    dut_aware_if = None
-    dut_ipv6 = None
-    peers_aware_ifs = []
-    peers_ipv6s = []
-    dut_requests = []
-    peers_requests = []
-    for i in range(max_ndp):
-      (init_req_key, resp_req_key, init_aware_if, resp_aware_if, init_ipv6,
-       resp_ipv6) = autils.create_oob_ndp(dut, self.android_devices[i + 1])
-      self.log.info("Interface names: I=%s, R=%s", init_aware_if, resp_aware_if)
-      self.log.info("Interface addresses (IPv6): I=%s, R=%s", init_ipv6,
-                    resp_ipv6)
+        # create all NDPs
+        dut_aware_if = None
+        dut_ipv6 = None
+        peers_aware_ifs = []
+        peers_ipv6s = []
+        dut_requests = []
+        peers_requests = []
+        for i in range(max_ndp):
+            (init_req_key, resp_req_key, init_aware_if, resp_aware_if,
+             init_ipv6, resp_ipv6) = autils.create_oob_ndp(
+                 dut, self.android_devices[i + 1])
+            self.log.info("Interface names: I=%s, R=%s", init_aware_if,
+                          resp_aware_if)
+            self.log.info("Interface addresses (IPv6): I=%s, R=%s", init_ipv6,
+                          resp_ipv6)
 
-      dut_requests.append(init_req_key)
-      peers_requests.append(resp_req_key)
-      if dut_aware_if is None:
-        dut_aware_if = init_aware_if
-      else:
-        asserts.assert_equal(
-            dut_aware_if, init_aware_if,
-            "DUT (Initiator) interface changed on subsequent NDPs!?")
-      if dut_ipv6 is None:
-        dut_ipv6 = init_ipv6
-      else:
-        asserts.assert_equal(
-            dut_ipv6, init_ipv6,
-            "DUT (Initiator) IPv6 changed on subsequent NDPs!?")
-      peers_aware_ifs.append(resp_aware_if)
-      peers_ipv6s.append(resp_ipv6)
+            dut_requests.append(init_req_key)
+            peers_requests.append(resp_req_key)
+            if dut_aware_if is None:
+                dut_aware_if = init_aware_if
+            else:
+                asserts.assert_equal(
+                    dut_aware_if, init_aware_if,
+                    "DUT (Initiator) interface changed on subsequent NDPs!?")
+            if dut_ipv6 is None:
+                dut_ipv6 = init_ipv6
+            else:
+                asserts.assert_equal(
+                    dut_ipv6, init_ipv6,
+                    "DUT (Initiator) IPv6 changed on subsequent NDPs!?")
+            peers_aware_ifs.append(resp_aware_if)
+            peers_ipv6s.append(resp_ipv6)
 
-    # create threads, start them, and wait for all to finish
-    base_port = 5000
-    q = queue.Queue()
-    threads = []
-    for i in range(max_ndp):
-      threads.append(
-          threading.Thread(
-              target=self.run_iperf,
-              args=(q, dut, self.android_devices[i + 1], peers_aware_ifs[i],
-                    dut_ipv6, base_port + i)))
+        # create threads, start them, and wait for all to finish
+        base_port = 5000
+        q = queue.Queue()
+        threads = []
+        for i in range(max_ndp):
+            threads.append(
+                threading.Thread(
+                    target=self.run_iperf,
+                    args=(q, dut, self.android_devices[i + 1],
+                          peers_aware_ifs[i], dut_ipv6, base_port + i)))
 
-    for thread in threads:
-      thread.start()
+        for thread in threads:
+            thread.start()
 
-    for thread in threads:
-      thread.join()
+        for thread in threads:
+            thread.join()
 
-    # cleanup
-    for i in range(max_ndp):
-      dut.droid.connectivityUnregisterNetworkCallback(dut_requests[i])
-      self.android_devices[i + 1].droid.connectivityUnregisterNetworkCallback(
-          peers_requests[i])
+        # cleanup
+        for i in range(max_ndp):
+            dut.droid.connectivityUnregisterNetworkCallback(dut_requests[i])
+            self.android_devices[
+                i + 1].droid.connectivityUnregisterNetworkCallback(
+                    peers_requests[i])
 
-    # collect data
-    for i in range(max_ndp):
-      results[i] = {}
-      result, data = q.get()
-      asserts.assert_true(result,
-                          "Failure starting/running iperf3 in client mode")
-      self.log.debug(pprint.pformat(data))
-      data_json = json.loads("".join(data))
-      if "error" in data_json:
-        asserts.fail(
-            "iperf run failed: %s" % data_json["error"], extras=data_json)
-      results[i]["tx_rate"] = data_json["end"]["sum_sent"]["bits_per_second"]
-      results[i]["rx_rate"] = data_json["end"]["sum_received"][
-          "bits_per_second"]
-      self.log.info("iPerf3: Sent = %d bps Received = %d bps",
-                    results[i]["tx_rate"], results[i]["rx_rate"])
+        # collect data
+        for i in range(max_ndp):
+            results[i] = {}
+            result, data = q.get()
+            asserts.assert_true(
+                result, "Failure starting/running iperf3 in client mode")
+            self.log.debug(pprint.pformat(data))
+            data_json = json.loads("".join(data))
+            if "error" in data_json:
+                asserts.fail(
+                    "iperf run failed: %s" % data_json["error"],
+                    extras=data_json)
+            results[i]["tx_rate"] = data_json["end"]["sum_sent"][
+                "bits_per_second"]
+            results[i]["rx_rate"] = data_json["end"]["sum_received"][
+                "bits_per_second"]
+            self.log.info("iPerf3: Sent = %d bps Received = %d bps",
+                          results[i]["tx_rate"], results[i]["rx_rate"])
 
-  ########################################################################
+    ########################################################################
 
-  def test_iperf_single_ndp_aware_only_ib(self):
-    """Measure throughput using iperf on a single NDP, with Aware enabled and
+    def test_iperf_single_ndp_aware_only_ib(self):
+        """Measure throughput using iperf on a single NDP, with Aware enabled and
     no infrastructure connection. Use in-band discovery."""
-    results = {}
-    self.run_iperf_single_ndp_aware_only(use_ib=True, results=results)
-    asserts.explicit_pass(
-        "test_iperf_single_ndp_aware_only_ib passes", extras=results)
+        results = {}
+        self.run_iperf_single_ndp_aware_only(use_ib=True, results=results)
+        asserts.explicit_pass(
+            "test_iperf_single_ndp_aware_only_ib passes", extras=results)
 
-  def test_iperf_single_ndp_aware_only_oob(self):
-    """Measure throughput using iperf on a single NDP, with Aware enabled and
+    def test_iperf_single_ndp_aware_only_oob(self):
+        """Measure throughput using iperf on a single NDP, with Aware enabled and
     no infrastructure connection. Use out-of-band discovery."""
-    results = {}
-    self.run_iperf_single_ndp_aware_only(use_ib=False, results=results)
-    asserts.explicit_pass(
-        "test_iperf_single_ndp_aware_only_oob passes", extras=results)
+        results = {}
+        self.run_iperf_single_ndp_aware_only(use_ib=False, results=results)
+        asserts.explicit_pass(
+            "test_iperf_single_ndp_aware_only_oob passes", extras=results)
 
-  def test_iperf_max_ndp_aware_only_oob(self):
-    """Measure throughput using iperf on all possible concurrent NDPs, with
+    def test_iperf_max_ndp_aware_only_oob(self):
+        """Measure throughput using iperf on all possible concurrent NDPs, with
     Aware enabled and no infrastructure connection. Use out-of-band discovery.
     """
-    results = {}
-    self.run_iperf_max_ndp_aware_only(results=results)
-    asserts.explicit_pass(
-        "test_iperf_max_ndp_aware_only_oob passes", extras=results)
+        results = {}
+        self.run_iperf_max_ndp_aware_only(results=results)
+        asserts.explicit_pass(
+            "test_iperf_max_ndp_aware_only_oob passes", extras=results)
 
-  ########################################################################
+    ########################################################################
 
-  def run_iperf_max_ndi_aware_only(self, sec_configs, results):
-    """Measure iperf performance on multiple NDPs between 2 devices using
+    def run_iperf_max_ndi_aware_only(self, sec_configs, results):
+        """Measure iperf performance on multiple NDPs between 2 devices using
     different security configurations (and hence different NDIs). Test with
     Aware enabled and no infrastructure connection - i.e. device is not
     associated to an AP.
@@ -256,123 +264,126 @@
       sec_configs: list of security configurations
       results: Dictionary into which to place test results.
     """
-    init_dut = self.android_devices[0]
-    init_dut.pretty_name = "Initiator"
-    resp_dut = self.android_devices[1]
-    resp_dut.pretty_name = "Responder"
+        init_dut = self.android_devices[0]
+        init_dut.pretty_name = "Initiator"
+        resp_dut = self.android_devices[1]
+        resp_dut.pretty_name = "Responder"
 
-    asserts.skip_if(init_dut.aware_capabilities[aconsts.CAP_MAX_NDI_INTERFACES]
-                    < len(sec_configs) or
-                    resp_dut.aware_capabilities[aconsts.CAP_MAX_NDI_INTERFACES]
-                    < len(sec_configs),
-                    "Initiator or Responder do not support multiple NDIs")
+        asserts.skip_if(
+            init_dut.aware_capabilities[aconsts.CAP_MAX_NDI_INTERFACES] <
+            len(sec_configs)
+            or resp_dut.aware_capabilities[aconsts.CAP_MAX_NDI_INTERFACES] <
+            len(sec_configs),
+            "Initiator or Responder do not support multiple NDIs")
 
+        init_id, init_mac = autils.attach_with_identity(init_dut)
+        resp_id, resp_mac = autils.attach_with_identity(resp_dut)
 
-    init_id, init_mac = autils.attach_with_identity(init_dut)
-    resp_id, resp_mac = autils.attach_with_identity(resp_dut)
+        # wait for for devices to synchronize with each other - there are no other
+        # mechanisms to make sure this happens for OOB discovery (except retrying
+        # to execute the data-path request)
+        time.sleep(autils.WAIT_FOR_CLUSTER)
 
-    # wait for for devices to synchronize with each other - there are no other
-    # mechanisms to make sure this happens for OOB discovery (except retrying
-    # to execute the data-path request)
-    time.sleep(autils.WAIT_FOR_CLUSTER)
+        resp_req_keys = []
+        init_req_keys = []
+        resp_aware_ifs = []
+        init_aware_ifs = []
+        resp_aware_ipv6s = []
+        init_aware_ipv6s = []
 
-    resp_req_keys = []
-    init_req_keys = []
-    resp_aware_ifs = []
-    init_aware_ifs = []
-    resp_aware_ipv6s = []
-    init_aware_ipv6s = []
+        for sec in sec_configs:
+            # Responder: request network
+            resp_req_key = autils.request_network(
+                resp_dut,
+                autils.get_network_specifier(resp_dut, resp_id,
+                                             aconsts.DATA_PATH_RESPONDER,
+                                             init_mac, sec))
+            resp_req_keys.append(resp_req_key)
 
-    for sec in sec_configs:
-      # Responder: request network
-      resp_req_key = autils.request_network(resp_dut,
-                                            autils.get_network_specifier(
-                                                resp_dut, resp_id,
-                                                aconsts.DATA_PATH_RESPONDER,
-                                                init_mac, sec))
-      resp_req_keys.append(resp_req_key)
+            # Initiator: request network
+            init_req_key = autils.request_network(
+                init_dut,
+                autils.get_network_specifier(init_dut, init_id,
+                                             aconsts.DATA_PATH_INITIATOR,
+                                             resp_mac, sec))
+            init_req_keys.append(init_req_key)
 
-      # Initiator: request network
-      init_req_key = autils.request_network(init_dut,
-                                            autils.get_network_specifier(
-                                                init_dut, init_id,
-                                                aconsts.DATA_PATH_INITIATOR,
-                                                resp_mac, sec))
-      init_req_keys.append(init_req_key)
+            # Wait for network
+            init_net_event = autils.wait_for_event_with_keys(
+                init_dut, cconsts.EVENT_NETWORK_CALLBACK,
+                autils.EVENT_NDP_TIMEOUT,
+                (cconsts.NETWORK_CB_KEY_EVENT,
+                 cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+                (cconsts.NETWORK_CB_KEY_ID, init_req_key))
+            resp_net_event = autils.wait_for_event_with_keys(
+                resp_dut, cconsts.EVENT_NETWORK_CALLBACK,
+                autils.EVENT_NDP_TIMEOUT,
+                (cconsts.NETWORK_CB_KEY_EVENT,
+                 cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+                (cconsts.NETWORK_CB_KEY_ID, resp_req_key))
 
-      # Wait for network
-      init_net_event = autils.wait_for_event_with_keys(
-          init_dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
-          (cconsts.NETWORK_CB_KEY_EVENT,
-           cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-          (cconsts.NETWORK_CB_KEY_ID, init_req_key))
-      resp_net_event = autils.wait_for_event_with_keys(
-          resp_dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_NDP_TIMEOUT,
-          (cconsts.NETWORK_CB_KEY_EVENT,
-           cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-          (cconsts.NETWORK_CB_KEY_ID, resp_req_key))
+            resp_aware_ifs.append(
+                resp_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME])
+            init_aware_ifs.append(
+                init_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME])
 
-      resp_aware_ifs.append(
-          resp_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME])
-      init_aware_ifs.append(
-          init_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME])
+            resp_aware_ipv6s.append(
+                autils.get_ipv6_addr(resp_dut, resp_aware_ifs[-1]))
+            init_aware_ipv6s.append(
+                autils.get_ipv6_addr(init_dut, init_aware_ifs[-1]))
 
-      resp_aware_ipv6s.append(
-          autils.get_ipv6_addr(resp_dut, resp_aware_ifs[-1]))
-      init_aware_ipv6s.append(
-          autils.get_ipv6_addr(init_dut, init_aware_ifs[-1]))
+        self.log.info("Initiator interfaces/ipv6: %s / %s", init_aware_ifs,
+                      init_aware_ipv6s)
+        self.log.info("Responder interfaces/ipv6: %s / %s", resp_aware_ifs,
+                      resp_aware_ipv6s)
 
-    self.log.info("Initiator interfaces/ipv6: %s / %s", init_aware_ifs,
-                  init_aware_ipv6s)
-    self.log.info("Responder interfaces/ipv6: %s / %s", resp_aware_ifs,
-                  resp_aware_ipv6s)
+        # create threads, start them, and wait for all to finish
+        base_port = 5000
+        q = queue.Queue()
+        threads = []
+        for i in range(len(sec_configs)):
+            threads.append(
+                threading.Thread(
+                    target=self.run_iperf,
+                    args=(q, init_dut, resp_dut, resp_aware_ifs[i],
+                          init_aware_ipv6s[i], base_port + i)))
 
-    # create threads, start them, and wait for all to finish
-    base_port = 5000
-    q = queue.Queue()
-    threads = []
-    for i in range(len(sec_configs)):
-      threads.append(
-          threading.Thread(
-              target=self.run_iperf,
-              args=(q, init_dut, resp_dut, resp_aware_ifs[i], init_aware_ipv6s[
-                  i], base_port + i)))
+        for thread in threads:
+            thread.start()
 
-    for thread in threads:
-      thread.start()
+        for thread in threads:
+            thread.join()
 
-    for thread in threads:
-      thread.join()
+        # release requests
+        for resp_req_key in resp_req_keys:
+            resp_dut.droid.connectivityUnregisterNetworkCallback(resp_req_key)
+        for init_req_key in init_req_keys:
+            init_dut.droid.connectivityUnregisterNetworkCallback(init_req_key)
 
-    # release requests
-    for resp_req_key in resp_req_keys:
-      resp_dut.droid.connectivityUnregisterNetworkCallback(resp_req_key)
-    for init_req_key in init_req_keys:
-      init_dut.droid.connectivityUnregisterNetworkCallback(init_req_key)
+        # collect data
+        for i in range(len(sec_configs)):
+            results[i] = {}
+            result, data = q.get()
+            asserts.assert_true(
+                result, "Failure starting/running iperf3 in client mode")
+            self.log.debug(pprint.pformat(data))
+            data_json = json.loads("".join(data))
+            if "error" in data_json:
+                asserts.fail(
+                    "iperf run failed: %s" % data_json["error"],
+                    extras=data_json)
+            results[i]["tx_rate"] = data_json["end"]["sum_sent"][
+                "bits_per_second"]
+            results[i]["rx_rate"] = data_json["end"]["sum_received"][
+                "bits_per_second"]
+            self.log.info("iPerf3: Sent = %d bps Received = %d bps",
+                          results[i]["tx_rate"], results[i]["rx_rate"])
 
-
-    # collect data
-    for i in range(len(sec_configs)):
-      results[i] = {}
-      result, data = q.get()
-      asserts.assert_true(result,
-                          "Failure starting/running iperf3 in client mode")
-      self.log.debug(pprint.pformat(data))
-      data_json = json.loads("".join(data))
-      if "error" in data_json:
-        asserts.fail(
-            "iperf run failed: %s" % data_json["error"], extras=data_json)
-      results[i]["tx_rate"] = data_json["end"]["sum_sent"]["bits_per_second"]
-      results[i]["rx_rate"] = data_json["end"]["sum_received"][
-        "bits_per_second"]
-      self.log.info("iPerf3: Sent = %d bps Received = %d bps",
-                    results[i]["tx_rate"], results[i]["rx_rate"])
-
-  def test_iperf_max_ndi_aware_only_passphrases(self):
-    """Test throughput for multiple NDIs configured with different passphrases.
+    def test_iperf_max_ndi_aware_only_passphrases(self):
+        """Test throughput for multiple NDIs configured with different passphrases.
     """
-    results = {}
-    self.run_iperf_max_ndi_aware_only(
-        [self.PASSPHRASE, self.PASSPHRASE2], results=results)
-    asserts.explicit_pass(
-        "test_iperf_max_ndi_aware_only_passphrases passes", extras=results)
+        results = {}
+        self.run_iperf_max_ndi_aware_only(
+            [self.PASSPHRASE, self.PASSPHRASE2], results=results)
+        asserts.explicit_pass(
+            "test_iperf_max_ndi_aware_only_passphrases passes", extras=results)
diff --git a/acts/tests/google/wifi/aware/stress/DataPathStressTest.py b/acts/tests/google/wifi/aware/stress/DataPathStressTest.py
index f718007..ee84afb 100644
--- a/acts/tests/google/wifi/aware/stress/DataPathStressTest.py
+++ b/acts/tests/google/wifi/aware/stress/DataPathStressTest.py
@@ -27,23 +27,25 @@
 
 class DataPathStressTest(AwareBaseTest):
 
-  # Number of iterations on create/destroy Attach sessions.
-  ATTACH_ITERATIONS = 2
+    # Number of iterations on create/destroy Attach sessions.
+    ATTACH_ITERATIONS = 2
 
-  # Number of iterations on create/destroy NDP in each discovery session.
-  NDP_ITERATIONS = 50
+    # Number of iterations on create/destroy NDP in each discovery session.
+    NDP_ITERATIONS = 50
 
-  # Maximum percentage of NDP setup failures over all iterations
-  MAX_FAILURE_PERCENTAGE = 1
+    # Maximum percentage of NDP setup failures over all iterations
+    MAX_FAILURE_PERCENTAGE = 1
 
-  def __init__(self, controllers):
-    AwareBaseTest.__init__(self, controllers)
+    def __init__(self, controllers):
+        AwareBaseTest.__init__(self, controllers)
 
-  ################################################################
+    ################################################################
 
-  def run_oob_ndp_stress(self, attach_iterations, ndp_iterations,
-      trigger_failure_on_index=None):
-    """Run NDP (NAN data-path) stress test creating and destroying Aware
+    def run_oob_ndp_stress(self,
+                           attach_iterations,
+                           ndp_iterations,
+                           trigger_failure_on_index=None):
+        """Run NDP (NAN data-path) stress test creating and destroying Aware
     attach sessions, discovery sessions, and NDPs.
 
     Args:
@@ -54,152 +56,165 @@
                                 before issuing the requeest on the Responder).
                                 If None then no artificial failure triggered.
     """
-    init_dut = self.android_devices[0]
-    init_dut.pretty_name = 'Initiator'
-    resp_dut = self.android_devices[1]
-    resp_dut.pretty_name = 'Responder'
+        init_dut = self.android_devices[0]
+        init_dut.pretty_name = 'Initiator'
+        resp_dut = self.android_devices[1]
+        resp_dut.pretty_name = 'Responder'
 
-    ndp_init_setup_success = 0
-    ndp_init_setup_failures = 0
-    ndp_resp_setup_success = 0
-    ndp_resp_setup_failures = 0
+        ndp_init_setup_success = 0
+        ndp_init_setup_failures = 0
+        ndp_resp_setup_success = 0
+        ndp_resp_setup_failures = 0
 
-    for attach_iter in range(attach_iterations):
-      init_id = init_dut.droid.wifiAwareAttach(True)
-      autils.wait_for_event(init_dut, aconsts.EVENT_CB_ON_ATTACHED)
-      init_ident_event = autils.wait_for_event(
-          init_dut, aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
-      init_mac = init_ident_event['data']['mac']
-      time.sleep(self.device_startup_offset)
-      resp_id = resp_dut.droid.wifiAwareAttach(True)
-      autils.wait_for_event(resp_dut, aconsts.EVENT_CB_ON_ATTACHED)
-      resp_ident_event = autils.wait_for_event(
-          resp_dut, aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
-      resp_mac = resp_ident_event['data']['mac']
+        for attach_iter in range(attach_iterations):
+            init_id = init_dut.droid.wifiAwareAttach(True)
+            autils.wait_for_event(init_dut, aconsts.EVENT_CB_ON_ATTACHED)
+            init_ident_event = autils.wait_for_event(
+                init_dut, aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
+            init_mac = init_ident_event['data']['mac']
+            time.sleep(self.device_startup_offset)
+            resp_id = resp_dut.droid.wifiAwareAttach(True)
+            autils.wait_for_event(resp_dut, aconsts.EVENT_CB_ON_ATTACHED)
+            resp_ident_event = autils.wait_for_event(
+                resp_dut, aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
+            resp_mac = resp_ident_event['data']['mac']
 
-      # wait for for devices to synchronize with each other - there are no other
-      # mechanisms to make sure this happens for OOB discovery (except retrying
-      # to execute the data-path request)
-      time.sleep(autils.WAIT_FOR_CLUSTER)
+            # wait for devices to synchronize with each other - there are no other
+            # mechanisms to make sure this happens for OOB discovery (except retrying
+            # to execute the data-path request)
+            time.sleep(autils.WAIT_FOR_CLUSTER)
 
-      for ndp_iteration in range(ndp_iterations):
-        if trigger_failure_on_index != ndp_iteration:
-          # Responder: request network
-          resp_req_key = autils.request_network(
-              resp_dut,
-              resp_dut.droid.wifiAwareCreateNetworkSpecifierOob(
-                  resp_id, aconsts.DATA_PATH_RESPONDER, init_mac, None))
+            for ndp_iteration in range(ndp_iterations):
+                if trigger_failure_on_index != ndp_iteration:
+                    # Responder: request network
+                    resp_req_key = autils.request_network(
+                        resp_dut,
+                        resp_dut.droid.wifiAwareCreateNetworkSpecifierOob(
+                            resp_id, aconsts.DATA_PATH_RESPONDER, init_mac,
+                            None))
 
-          # Wait a minimal amount of time to let the Responder configure itself
-          # and be ready for the request. While calling it first may be
-          # sufficient there are no guarantees that a glitch may slow the
-          # Responder slightly enough to invert the setup order.
-          time.sleep(1)
+                    # Wait a minimal amount of time to let the Responder configure itself
+                    # and be ready for the request. While calling it first may be
+                    # sufficient, there is no guarantee that a glitch won't slow the
+                    # Responder slightly enough to invert the setup order.
+                    time.sleep(1)
 
-          # Initiator: request network
-          init_req_key = autils.request_network(
-              init_dut,
-              init_dut.droid.wifiAwareCreateNetworkSpecifierOob(
-                  init_id, aconsts.DATA_PATH_INITIATOR, resp_mac, None))
-        else:
-          # Initiator: request network
-          init_req_key = autils.request_network(
-              init_dut,
-              init_dut.droid.wifiAwareCreateNetworkSpecifierOob(
-                  init_id, aconsts.DATA_PATH_INITIATOR, resp_mac, None))
+                    # Initiator: request network
+                    init_req_key = autils.request_network(
+                        init_dut,
+                        init_dut.droid.wifiAwareCreateNetworkSpecifierOob(
+                            init_id, aconsts.DATA_PATH_INITIATOR, resp_mac,
+                            None))
+                else:
+                    # Initiator: request network
+                    init_req_key = autils.request_network(
+                        init_dut,
+                        init_dut.droid.wifiAwareCreateNetworkSpecifierOob(
+                            init_id, aconsts.DATA_PATH_INITIATOR, resp_mac,
+                            None))
 
-          # Wait a minimal amount of time to let the Initiator configure itself
-          # to guarantee failure!
-          time.sleep(2)
+                    # Wait a minimal amount of time to let the Initiator configure itself
+                    # to guarantee failure!
+                    time.sleep(2)
 
-          # Responder: request network
-          resp_req_key = autils.request_network(
-              resp_dut,
-              resp_dut.droid.wifiAwareCreateNetworkSpecifierOob(
-                  resp_id, aconsts.DATA_PATH_RESPONDER, init_mac, None))
+                    # Responder: request network
+                    resp_req_key = autils.request_network(
+                        resp_dut,
+                        resp_dut.droid.wifiAwareCreateNetworkSpecifierOob(
+                            resp_id, aconsts.DATA_PATH_RESPONDER, init_mac,
+                            None))
 
-        # Initiator: wait for network formation
-        got_on_available = False
-        got_on_link_props = False
-        while not got_on_available or not got_on_link_props:
-          try:
-            nc_event = init_dut.ed.pop_event(cconsts.EVENT_NETWORK_CALLBACK,
-                                             autils.EVENT_NDP_TIMEOUT)
-            if nc_event['data'][
-                cconsts.NETWORK_CB_KEY_EVENT] == cconsts.NETWORK_CB_AVAILABLE:
-              got_on_available = True
-            elif (nc_event['data'][cconsts.NETWORK_CB_KEY_EVENT] ==
-                  cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED):
-              got_on_link_props = True
-          except queue.Empty:
-            ndp_init_setup_failures = ndp_init_setup_failures + 1
-            init_dut.log.info('[Initiator] Timed out while waiting for '
-                              'EVENT_NETWORK_CALLBACK')
-            break
+                # Initiator: wait for network formation
+                got_on_available = False
+                got_on_link_props = False
+                while not got_on_available or not got_on_link_props:
+                    try:
+                        nc_event = init_dut.ed.pop_event(
+                            cconsts.EVENT_NETWORK_CALLBACK,
+                            autils.EVENT_NDP_TIMEOUT)
+                        if nc_event['data'][
+                                cconsts.
+                                NETWORK_CB_KEY_EVENT] == cconsts.NETWORK_CB_AVAILABLE:
+                            got_on_available = True
+                        elif (nc_event['data'][cconsts.NETWORK_CB_KEY_EVENT] ==
+                              cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED):
+                            got_on_link_props = True
+                    except queue.Empty:
+                        ndp_init_setup_failures = ndp_init_setup_failures + 1
+                        init_dut.log.info(
+                            '[Initiator] Timed out while waiting for '
+                            'EVENT_NETWORK_CALLBACK')
+                        break
 
-        if got_on_available and got_on_link_props:
-          ndp_init_setup_success = ndp_init_setup_success + 1
+                if got_on_available and got_on_link_props:
+                    ndp_init_setup_success = ndp_init_setup_success + 1
 
-        # Responder: wait for network formation
-        got_on_available = False
-        got_on_link_props = False
-        while not got_on_available or not got_on_link_props:
-          try:
-            nc_event = resp_dut.ed.pop_event(cconsts.EVENT_NETWORK_CALLBACK,
-                                             autils.EVENT_NDP_TIMEOUT)
-            if nc_event['data'][
-                cconsts.NETWORK_CB_KEY_EVENT] == cconsts.NETWORK_CB_AVAILABLE:
-              got_on_available = True
-            elif (nc_event['data'][cconsts.NETWORK_CB_KEY_EVENT] ==
-                  cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED):
-              got_on_link_props = True
-          except queue.Empty:
-            ndp_resp_setup_failures = ndp_resp_setup_failures + 1
-            init_dut.log.info('[Responder] Timed out while waiting for '
-                              'EVENT_NETWORK_CALLBACK')
-            break
+                # Responder: wait for network formation
+                got_on_available = False
+                got_on_link_props = False
+                while not got_on_available or not got_on_link_props:
+                    try:
+                        nc_event = resp_dut.ed.pop_event(
+                            cconsts.EVENT_NETWORK_CALLBACK,
+                            autils.EVENT_NDP_TIMEOUT)
+                        if nc_event['data'][
+                                cconsts.
+                                NETWORK_CB_KEY_EVENT] == cconsts.NETWORK_CB_AVAILABLE:
+                            got_on_available = True
+                        elif (nc_event['data'][cconsts.NETWORK_CB_KEY_EVENT] ==
+                              cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED):
+                            got_on_link_props = True
+                    except queue.Empty:
+                        ndp_resp_setup_failures = ndp_resp_setup_failures + 1
+                        init_dut.log.info(
+                            '[Responder] Timed out while waiting for '
+                            'EVENT_NETWORK_CALLBACK')
+                        break
 
-        if got_on_available and got_on_link_props:
-          ndp_resp_setup_success = ndp_resp_setup_success + 1
+                if got_on_available and got_on_link_props:
+                    ndp_resp_setup_success = ndp_resp_setup_success + 1
 
-        # clean-up
-        init_dut.droid.connectivityUnregisterNetworkCallback(init_req_key)
-        resp_dut.droid.connectivityUnregisterNetworkCallback(resp_req_key)
+                # clean-up
+                init_dut.droid.connectivityUnregisterNetworkCallback(
+                    init_req_key)
+                resp_dut.droid.connectivityUnregisterNetworkCallback(
+                    resp_req_key)
 
-      # clean-up at end of iteration
-      init_dut.droid.wifiAwareDestroy(init_id)
-      resp_dut.droid.wifiAwareDestroy(resp_id)
+            # clean-up at end of iteration
+            init_dut.droid.wifiAwareDestroy(init_id)
+            resp_dut.droid.wifiAwareDestroy(resp_id)
 
-    results = {}
-    results['ndp_init_setup_success'] = ndp_init_setup_success
-    results['ndp_init_setup_failures'] = ndp_init_setup_failures
-    results['ndp_resp_setup_success'] = ndp_resp_setup_success
-    results['ndp_resp_setup_failures'] = ndp_resp_setup_failures
-    max_failures = (
-        self.MAX_FAILURE_PERCENTAGE * attach_iterations * ndp_iterations / 100)
-    if max_failures == 0:
-      max_failures = 1
-    if trigger_failure_on_index is not None:
-      max_failures = max_failures + 1 # for the triggered failure
-    asserts.assert_true(
-      (ndp_init_setup_failures + ndp_resp_setup_failures) < (2 * max_failures),
-      'NDP setup failure rate exceeds threshold', extras=results)
-    asserts.explicit_pass("test_oob_ndp_stress* done", extras=results)
+        results = {}
+        results['ndp_init_setup_success'] = ndp_init_setup_success
+        results['ndp_init_setup_failures'] = ndp_init_setup_failures
+        results['ndp_resp_setup_success'] = ndp_resp_setup_success
+        results['ndp_resp_setup_failures'] = ndp_resp_setup_failures
+        max_failures = (self.MAX_FAILURE_PERCENTAGE * attach_iterations *
+                        ndp_iterations / 100)
+        if max_failures == 0:
+            max_failures = 1
+        if trigger_failure_on_index is not None:
+            max_failures = max_failures + 1  # for the triggered failure
+        asserts.assert_true(
+            (ndp_init_setup_failures + ndp_resp_setup_failures) <
+            (2 * max_failures),
+            'NDP setup failure rate exceeds threshold',
+            extras=results)
+        asserts.explicit_pass("test_oob_ndp_stress* done", extras=results)
 
-  @test_tracker_info(uuid="a20a96ba-e71f-4d31-b850-b88a75381981")
-  def test_oob_ndp_stress(self):
-    """Run NDP (NAN data-path) stress test creating and destroying Aware
+    @test_tracker_info(uuid="a20a96ba-e71f-4d31-b850-b88a75381981")
+    def test_oob_ndp_stress(self):
+        """Run NDP (NAN data-path) stress test creating and destroying Aware
     attach sessions, discovery sessions, and NDPs."""
-    self.run_oob_ndp_stress(self.ATTACH_ITERATIONS, self.NDP_ITERATIONS)
+        self.run_oob_ndp_stress(self.ATTACH_ITERATIONS, self.NDP_ITERATIONS)
 
-  @test_tracker_info(uuid="1fb4a383-bf1a-411a-a904-489dd9e29c6a")
-  def test_oob_ndp_stress_failure_case(self):
-    """Run NDP (NAN data-path) stress test creating and destroying Aware
+    @test_tracker_info(uuid="1fb4a383-bf1a-411a-a904-489dd9e29c6a")
+    def test_oob_ndp_stress_failure_case(self):
+        """Run NDP (NAN data-path) stress test creating and destroying Aware
     attach sessions, discovery sessions, and NDPs.
 
     Verify recovery from failure by triggering an artifical failure and
     verifying that all subsequent iterations succeed.
     """
-    self.run_oob_ndp_stress(attach_iterations=1,
-                            ndp_iterations=10,
-                            trigger_failure_on_index=3)
+        self.run_oob_ndp_stress(
+            attach_iterations=1, ndp_iterations=10, trigger_failure_on_index=3)
diff --git a/acts/tests/google/wifi/aware/stress/DiscoveryStressTest.py b/acts/tests/google/wifi/aware/stress/DiscoveryStressTest.py
index eaa5d19..f9bb81e 100644
--- a/acts/tests/google/wifi/aware/stress/DiscoveryStressTest.py
+++ b/acts/tests/google/wifi/aware/stress/DiscoveryStressTest.py
@@ -24,84 +24,93 @@
 
 
 class DiscoveryStressTest(AwareBaseTest):
-  """Stress tests for Discovery sessions"""
+    """Stress tests for Discovery sessions"""
 
-  # Number of iterations on create/destroy Attach sessions.
-  ATTACH_ITERATIONS = 2
+    # Number of iterations on create/destroy Attach sessions.
+    ATTACH_ITERATIONS = 2
 
-  # Number of iterations on create/destroy Discovery sessions
-  DISCOVERY_ITERATIONS = 40
+    # Number of iterations on create/destroy Discovery sessions
+    DISCOVERY_ITERATIONS = 40
 
-  def __init__(self, controllers):
-    AwareBaseTest.__init__(self, controllers)
+    def __init__(self, controllers):
+        AwareBaseTest.__init__(self, controllers)
 
-  ####################################################################
+    ####################################################################
 
-  @test_tracker_info(uuid="783791e5-7726-44e0-ac5b-98c1dbf493cb")
-  def test_discovery_stress(self):
-    """Create and destroy a random array of discovery sessions, up to the
+    @test_tracker_info(uuid="783791e5-7726-44e0-ac5b-98c1dbf493cb")
+    def test_discovery_stress(self):
+        """Create and destroy a random array of discovery sessions, up to the
     limit of capabilities."""
-    dut = self.android_devices[0]
+        dut = self.android_devices[0]
 
-    discovery_setup_success = 0
-    discovery_setup_fail = 0
+        discovery_setup_success = 0
+        discovery_setup_fail = 0
 
-    for attach_iter in range(self.ATTACH_ITERATIONS):
-      # attach
-      session_id = dut.droid.wifiAwareAttach(True)
-      autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
+        for attach_iter in range(self.ATTACH_ITERATIONS):
+            # attach
+            session_id = dut.droid.wifiAwareAttach(True)
+            autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
 
-      p_discovery_ids = []
-      s_discovery_ids = []
-      for discovery_iter in range(self.DISCOVERY_ITERATIONS):
-        service_name = 'GoogleTestService-%d-%d' % (attach_iter, discovery_iter)
+            p_discovery_ids = []
+            s_discovery_ids = []
+            for discovery_iter in range(self.DISCOVERY_ITERATIONS):
+                service_name = 'GoogleTestService-%d-%d' % (attach_iter,
+                                                            discovery_iter)
 
-        p_config = None
-        s_config = None
+                p_config = None
+                s_config = None
 
-        if discovery_iter % 4 == 0:  # publish/unsolicited
-          p_config = autils.create_discovery_config(
-              service_name, aconsts.PUBLISH_TYPE_UNSOLICITED)
-        elif discovery_iter % 4 == 1:  # publish/solicited
-          p_config = autils.create_discovery_config(
-              service_name, aconsts.PUBLISH_TYPE_SOLICITED)
-        elif discovery_iter % 4 == 2:  # subscribe/passive
-          s_config = autils.create_discovery_config(
-              service_name, aconsts.SUBSCRIBE_TYPE_PASSIVE)
-        elif discovery_iter % 4 == 3:  # subscribe/active
-          s_config = autils.create_discovery_config(
-              service_name, aconsts.SUBSCRIBE_TYPE_ACTIVE)
+                if discovery_iter % 4 == 0:  # publish/unsolicited
+                    p_config = autils.create_discovery_config(
+                        service_name, aconsts.PUBLISH_TYPE_UNSOLICITED)
+                elif discovery_iter % 4 == 1:  # publish/solicited
+                    p_config = autils.create_discovery_config(
+                        service_name, aconsts.PUBLISH_TYPE_SOLICITED)
+                elif discovery_iter % 4 == 2:  # subscribe/passive
+                    s_config = autils.create_discovery_config(
+                        service_name, aconsts.SUBSCRIBE_TYPE_PASSIVE)
+                elif discovery_iter % 4 == 3:  # subscribe/active
+                    s_config = autils.create_discovery_config(
+                        service_name, aconsts.SUBSCRIBE_TYPE_ACTIVE)
 
-        if p_config is not None:
-          if len(p_discovery_ids) == dut.aware_capabilities[
-              aconsts.CAP_MAX_PUBLISHES]:
-            dut.droid.wifiAwareDestroyDiscoverySession(
-                p_discovery_ids.pop(dut.aware_capabilities[
-                    aconsts.CAP_MAX_PUBLISHES] // 2))
-          disc_id = dut.droid.wifiAwarePublish(session_id, p_config)
-          event_name = aconsts.SESSION_CB_ON_PUBLISH_STARTED
-          p_discovery_ids.append(disc_id)
-        else:
-          if len(s_discovery_ids) == dut.aware_capabilities[
-              aconsts.CAP_MAX_SUBSCRIBES]:
-            dut.droid.wifiAwareDestroyDiscoverySession(
-                s_discovery_ids.pop(dut.aware_capabilities[
-                    aconsts.CAP_MAX_SUBSCRIBES] // 2))
-          disc_id = dut.droid.wifiAwareSubscribe(session_id, s_config)
-          event_name = aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED
-          s_discovery_ids.append(disc_id)
+                if p_config is not None:
+                    if len(p_discovery_ids) == dut.aware_capabilities[
+                            aconsts.CAP_MAX_PUBLISHES]:
+                        dut.droid.wifiAwareDestroyDiscoverySession(
+                            p_discovery_ids.pop(
+                                dut.aware_capabilities[aconsts.
+                                                       CAP_MAX_PUBLISHES] //
+                                2))
+                    disc_id = dut.droid.wifiAwarePublish(session_id, p_config)
+                    event_name = aconsts.SESSION_CB_ON_PUBLISH_STARTED
+                    p_discovery_ids.append(disc_id)
+                else:
+                    if len(s_discovery_ids) == dut.aware_capabilities[
+                            aconsts.CAP_MAX_SUBSCRIBES]:
+                        dut.droid.wifiAwareDestroyDiscoverySession(
+                            s_discovery_ids.pop(
+                                dut.aware_capabilities[aconsts.
+                                                       CAP_MAX_SUBSCRIBES] //
+                                2))
+                    disc_id = dut.droid.wifiAwareSubscribe(
+                        session_id, s_config)
+                    event_name = aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED
+                    s_discovery_ids.append(disc_id)
 
-        try:
-          dut.ed.pop_event(event_name, autils.EVENT_TIMEOUT)
-          discovery_setup_success = discovery_setup_success + 1
-        except queue.Empty:
-          discovery_setup_fail = discovery_setup_fail + 1
+                try:
+                    dut.ed.pop_event(event_name, autils.EVENT_TIMEOUT)
+                    discovery_setup_success = discovery_setup_success + 1
+                except queue.Empty:
+                    discovery_setup_fail = discovery_setup_fail + 1
 
-      dut.droid.wifiAwareDestroy(session_id)
+            dut.droid.wifiAwareDestroy(session_id)
 
-    results = {}
-    results['discovery_setup_success'] = discovery_setup_success
-    results['discovery_setup_fail'] = discovery_setup_fail
-    asserts.assert_equal(discovery_setup_fail, 0,
-                         'Discovery setup failures', extras=results)
-    asserts.explicit_pass('test_discovery_stress done', extras=results)
+        results = {}
+        results['discovery_setup_success'] = discovery_setup_success
+        results['discovery_setup_fail'] = discovery_setup_fail
+        asserts.assert_equal(
+            discovery_setup_fail,
+            0,
+            'Discovery setup failures',
+            extras=results)
+        asserts.explicit_pass('test_discovery_stress done', extras=results)
diff --git a/acts/tests/google/wifi/aware/stress/InfraAssociationStressTest.py b/acts/tests/google/wifi/aware/stress/InfraAssociationStressTest.py
index 917a7d9..fc320c0 100644
--- a/acts/tests/google/wifi/aware/stress/InfraAssociationStressTest.py
+++ b/acts/tests/google/wifi/aware/stress/InfraAssociationStressTest.py
@@ -25,29 +25,28 @@
 
 
 class InfraAssociationStressTest(AwareBaseTest):
+    def __init__(self, controllers):
+        AwareBaseTest.__init__(self, controllers)
 
-  def __init__(self, controllers):
-    AwareBaseTest.__init__(self, controllers)
+    # Length of test in seconds
+    TEST_DURATION_SECONDS = 300
 
-  # Length of test in seconds
-  TEST_DURATION_SECONDS = 300
+    # Service name
+    SERVICE_NAME = "GoogleTestServiceXYXYXY"
 
-  # Service name
-  SERVICE_NAME = "GoogleTestServiceXYXYXY"
-
-  def is_associated(self, dut):
-    """Checks whether the device is associated (to any AP).
+    def is_associated(self, dut):
+        """Checks whether the device is associated (to any AP).
 
     Args:
       dut: Device under test.
 
     Returns: True if associated (to any AP), False otherwise.
     """
-    info = dut.droid.wifiGetConnectionInfo()
-    return info is not None and info["supplicant_state"] != "disconnected"
+        info = dut.droid.wifiGetConnectionInfo()
+        return info is not None and info["supplicant_state"] != "disconnected"
 
-  def wait_for_disassociation(self, q, dut):
-    """Waits for a disassociation event on the specified DUT for the given
+    def wait_for_disassociation(self, q, dut):
+        """Waits for a disassociation event on the specified DUT for the given
     timeout. Place a result into the queue (False) only if disassociation
     observed.
 
@@ -55,14 +54,15 @@
       q: The synchronization queue into which to place the results.
       dut: The device to track.
     """
-    try:
-      dut.ed.pop_event(wconsts.WIFI_DISCONNECTED, self.TEST_DURATION_SECONDS)
-      q.put(True)
-    except queue.Empty:
-      pass
+        try:
+            dut.ed.pop_event(wconsts.WIFI_DISCONNECTED,
+                             self.TEST_DURATION_SECONDS)
+            q.put(True)
+        except queue.Empty:
+            pass
 
-  def run_infra_assoc_oob_ndp_stress(self, with_ndp_traffic):
-    """Validates that Wi-Fi Aware NDP does not interfere with infrastructure
+    def run_infra_assoc_oob_ndp_stress(self, with_ndp_traffic):
+        """Validates that Wi-Fi Aware NDP does not interfere with infrastructure
     (AP) association.
 
     Test assumes (and verifies) that device is already associated to an AP.
@@ -70,92 +70,94 @@
     Args:
       with_ndp_traffic: True to run traffic over the NDP.
     """
-    init_dut = self.android_devices[0]
-    resp_dut = self.android_devices[1]
+        init_dut = self.android_devices[0]
+        resp_dut = self.android_devices[1]
 
-    # check that associated and start tracking
-    init_dut.droid.wifiStartTrackingStateChange()
-    resp_dut.droid.wifiStartTrackingStateChange()
-    asserts.assert_true(
-        self.is_associated(init_dut), "DUT is not associated to an AP!")
-    asserts.assert_true(
-        self.is_associated(resp_dut), "DUT is not associated to an AP!")
+        # check that associated and start tracking
+        init_dut.droid.wifiStartTrackingStateChange()
+        resp_dut.droid.wifiStartTrackingStateChange()
+        asserts.assert_true(
+            self.is_associated(init_dut), "DUT is not associated to an AP!")
+        asserts.assert_true(
+            self.is_associated(resp_dut), "DUT is not associated to an AP!")
 
-    # set up NDP
-    (init_req_key, resp_req_key, init_aware_if, resp_aware_if, init_ipv6,
-     resp_ipv6) = autils.create_oob_ndp(init_dut, resp_dut)
-    self.log.info("Interface names: I=%s, R=%s", init_aware_if, resp_aware_if)
-    self.log.info("Interface addresses (IPv6): I=%s, R=%s", init_ipv6,
-                  resp_ipv6)
+        # set up NDP
+        (init_req_key, resp_req_key, init_aware_if, resp_aware_if, init_ipv6,
+         resp_ipv6) = autils.create_oob_ndp(init_dut, resp_dut)
+        self.log.info("Interface names: I=%s, R=%s", init_aware_if,
+                      resp_aware_if)
+        self.log.info("Interface addresses (IPv6): I=%s, R=%s", init_ipv6,
+                      resp_ipv6)
 
-    # wait for any disassociation change events
-    q = queue.Queue()
-    init_thread = threading.Thread(
-        target=self.wait_for_disassociation, args=(q, init_dut))
-    resp_thread = threading.Thread(
-        target=self.wait_for_disassociation, args=(q, resp_dut))
+        # wait for any disassociation change events
+        q = queue.Queue()
+        init_thread = threading.Thread(
+            target=self.wait_for_disassociation, args=(q, init_dut))
+        resp_thread = threading.Thread(
+            target=self.wait_for_disassociation, args=(q, resp_dut))
 
-    init_thread.start()
-    resp_thread.start()
+        init_thread.start()
+        resp_thread.start()
 
-    any_disassociations = False
-    try:
-      q.get(True, self.TEST_DURATION_SECONDS)
-      any_disassociations = True  # only happens on any disassociation
-    except queue.Empty:
-      pass
-    finally:
-      # TODO: no way to terminate thread (so even if we fast fail we still have
-      # to wait for the full timeout.
-      init_dut.droid.wifiStopTrackingStateChange()
-      resp_dut.droid.wifiStopTrackingStateChange()
+        any_disassociations = False
+        try:
+            q.get(True, self.TEST_DURATION_SECONDS)
+            any_disassociations = True  # only happens on any disassociation
+        except queue.Empty:
+            pass
+        finally:
+            # TODO: no way to terminate thread (so even if we fast fail we still have
+            # to wait for the full timeout).
+            init_dut.droid.wifiStopTrackingStateChange()
+            resp_dut.droid.wifiStopTrackingStateChange()
 
-    asserts.assert_false(any_disassociations,
-                         "Wi-Fi disassociated during test run")
+        asserts.assert_false(any_disassociations,
+                             "Wi-Fi disassociated during test run")
 
-  ################################################################
+    ################################################################
 
-  def test_infra_assoc_discovery_stress(self):
-    """Validates that Wi-Fi Aware discovery does not interfere with
+    def test_infra_assoc_discovery_stress(self):
+        """Validates that Wi-Fi Aware discovery does not interfere with
     infrastructure (AP) association.
 
     Test assumes (and verifies) that device is already associated to an AP.
     """
-    dut = self.android_devices[0]
+        dut = self.android_devices[0]
 
-    # check that associated and start tracking
-    dut.droid.wifiStartTrackingStateChange()
-    asserts.assert_true(
-        self.is_associated(dut), "DUT is not associated to an AP!")
+        # check that associated and start tracking
+        dut.droid.wifiStartTrackingStateChange()
+        asserts.assert_true(
+            self.is_associated(dut), "DUT is not associated to an AP!")
 
-    # attach
-    session_id = dut.droid.wifiAwareAttach(True)
-    autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
+        # attach
+        session_id = dut.droid.wifiAwareAttach(True)
+        autils.wait_for_event(dut, aconsts.EVENT_CB_ON_ATTACHED)
 
-    # publish
-    p_disc_id = dut.droid.wifiAwarePublish(
-        session_id,
-        autils.create_discovery_config(self.SERVICE_NAME,
-                                       aconsts.PUBLISH_TYPE_UNSOLICITED))
-    autils.wait_for_event(dut, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
+        # publish
+        p_disc_id = dut.droid.wifiAwarePublish(
+            session_id,
+            autils.create_discovery_config(self.SERVICE_NAME,
+                                           aconsts.PUBLISH_TYPE_UNSOLICITED))
+        autils.wait_for_event(dut, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
 
-    # wait for any disassociation change events
-    any_disassociations = False
-    try:
-      dut.ed.pop_event(wconsts.WIFI_DISCONNECTED, self.TEST_DURATION_SECONDS)
-      any_disassociations = True
-    except queue.Empty:
-      pass
-    finally:
-      dut.droid.wifiStopTrackingStateChange()
+        # wait for any disassociation change events
+        any_disassociations = False
+        try:
+            dut.ed.pop_event(wconsts.WIFI_DISCONNECTED,
+                             self.TEST_DURATION_SECONDS)
+            any_disassociations = True
+        except queue.Empty:
+            pass
+        finally:
+            dut.droid.wifiStopTrackingStateChange()
 
-    asserts.assert_false(any_disassociations,
-                         "Wi-Fi disassociated during test run")
+        asserts.assert_false(any_disassociations,
+                             "Wi-Fi disassociated during test run")
 
-  def test_infra_assoc_ndp_no_traffic_stress(self):
-    """Validates that Wi-Fi Aware NDP (with no traffic) does not interfere with
+    def test_infra_assoc_ndp_no_traffic_stress(self):
+        """Validates that Wi-Fi Aware NDP (with no traffic) does not interfere with
     infrastructure (AP) association.
 
     Test assumes (and verifies) that devices are already associated to an AP.
     """
-    self.run_infra_assoc_oob_ndp_stress(with_ndp_traffic=False)
+        self.run_infra_assoc_oob_ndp_stress(with_ndp_traffic=False)
diff --git a/acts/tests/google/wifi/aware/stress/MessagesStressTest.py b/acts/tests/google/wifi/aware/stress/MessagesStressTest.py
index 34827f1..e01a543 100644
--- a/acts/tests/google/wifi/aware/stress/MessagesStressTest.py
+++ b/acts/tests/google/wifi/aware/stress/MessagesStressTest.py
@@ -29,27 +29,27 @@
 
 
 class MessagesStressTest(AwareBaseTest):
-  """Set of stress tests for Wi-Fi Aware L2 (layer 2) message exchanges."""
+    """Set of stress tests for Wi-Fi Aware L2 (layer 2) message exchanges."""
 
-  # Number of iterations in the stress test (number of messages)
-  NUM_ITERATIONS = 100
+    # Number of iterations in the stress test (number of messages)
+    NUM_ITERATIONS = 100
 
-  # Maximum permitted percentage of messages which fail to be transmitted
-  # correctly
-  MAX_TX_FAILURE_PERCENTAGE = 2
+    # Maximum permitted percentage of messages which fail to be transmitted
+    # correctly
+    MAX_TX_FAILURE_PERCENTAGE = 2
 
-  # Maximum permitted percentage of messages which are received more than once
-  # (indicating, most likely, that the ACK wasn't received and the message was
-  # retransmitted)
-  MAX_DUPLICATE_RX_PERCENTAGE = 2
+    # Maximum permitted percentage of messages which are received more than once
+    # (indicating, most likely, that the ACK wasn't received and the message was
+    # retransmitted)
+    MAX_DUPLICATE_RX_PERCENTAGE = 2
 
-  SERVICE_NAME = "GoogleTestServiceXY"
+    SERVICE_NAME = "GoogleTestServiceXY"
 
-  def __init__(self, controllers):
-    AwareBaseTest.__init__(self, controllers)
+    def __init__(self, controllers):
+        AwareBaseTest.__init__(self, controllers)
 
-  def init_info(self, msg, id, messages_by_msg, messages_by_id):
-    """Initialize the message data structures.
+    def init_info(self, msg, id, messages_by_msg, messages_by_id):
+        """Initialize the message data structures.
 
     Args:
       msg: message text
@@ -57,15 +57,16 @@
       messages_by_msg: {text -> {id, tx_ok_count, tx_fail_count, rx_count}}
       messages_by_id: {id -> text}
     """
-    messages_by_msg[msg] = {}
-    messages_by_msg[msg][KEY_ID] = id
-    messages_by_msg[msg][KEY_TX_OK_COUNT] = 0
-    messages_by_msg[msg][KEY_TX_FAIL_COUNT] = 0
-    messages_by_msg[msg][KEY_RX_COUNT] = 0
-    messages_by_id[id] = msg
+        messages_by_msg[msg] = {}
+        messages_by_msg[msg][KEY_ID] = id
+        messages_by_msg[msg][KEY_TX_OK_COUNT] = 0
+        messages_by_msg[msg][KEY_TX_FAIL_COUNT] = 0
+        messages_by_msg[msg][KEY_RX_COUNT] = 0
+        messages_by_id[id] = msg
 
-  def wait_for_tx_events(self, dut, num_msgs, messages_by_msg, messages_by_id):
-    """Wait for messages to be transmitted and update data structures.
+    def wait_for_tx_events(self, dut, num_msgs, messages_by_msg,
+                           messages_by_id):
+        """Wait for messages to be transmitted and update data structures.
 
     Args:
       dut: device under test
@@ -73,224 +74,238 @@
       messages_by_msg: {text -> {id, tx_ok_count, tx_fail_count, rx_count}}
       messages_by_id: {id -> text}
     """
-    num_ok_tx_confirmations = 0
-    num_fail_tx_confirmations = 0
-    num_unexpected_ids = 0
-    tx_events_regex = "%s|%s" % (aconsts.SESSION_CB_ON_MESSAGE_SEND_FAILED,
-                                 aconsts.SESSION_CB_ON_MESSAGE_SENT)
-    while num_ok_tx_confirmations + num_fail_tx_confirmations < num_msgs:
-      try:
-        events = dut.ed.pop_events(tx_events_regex, autils.EVENT_TIMEOUT)
-        for event in events:
-          if (event["name"] != aconsts.SESSION_CB_ON_MESSAGE_SENT and
-              event["name"] != aconsts.SESSION_CB_ON_MESSAGE_SEND_FAILED):
-            asserts.fail("Unexpected event: %s" % event)
-          is_tx_ok = event["name"] == aconsts.SESSION_CB_ON_MESSAGE_SENT
+        num_ok_tx_confirmations = 0
+        num_fail_tx_confirmations = 0
+        num_unexpected_ids = 0
+        tx_events_regex = "%s|%s" % (aconsts.SESSION_CB_ON_MESSAGE_SEND_FAILED,
+                                     aconsts.SESSION_CB_ON_MESSAGE_SENT)
+        while num_ok_tx_confirmations + num_fail_tx_confirmations < num_msgs:
+            try:
+                events = dut.ed.pop_events(tx_events_regex,
+                                           autils.EVENT_TIMEOUT)
+                for event in events:
+                    if (event["name"] != aconsts.SESSION_CB_ON_MESSAGE_SENT
+                            and event["name"] !=
+                            aconsts.SESSION_CB_ON_MESSAGE_SEND_FAILED):
+                        asserts.fail("Unexpected event: %s" % event)
+                    is_tx_ok = event[
+                        "name"] == aconsts.SESSION_CB_ON_MESSAGE_SENT
 
-          id = event["data"][aconsts.SESSION_CB_KEY_MESSAGE_ID]
-          if id in messages_by_id:
-            msg = messages_by_id[id]
-            if is_tx_ok:
-              messages_by_msg[msg][
-                  KEY_TX_OK_COUNT] = messages_by_msg[msg][KEY_TX_OK_COUNT] + 1
-              if messages_by_msg[msg][KEY_TX_OK_COUNT] == 1:
-                num_ok_tx_confirmations = num_ok_tx_confirmations + 1
-            else:
-              messages_by_msg[msg][KEY_TX_FAIL_COUNT] = (
-                  messages_by_msg[msg][KEY_TX_FAIL_COUNT] + 1)
-              if messages_by_msg[msg][KEY_TX_FAIL_COUNT] == 1:
-                num_fail_tx_confirmations = num_fail_tx_confirmations + 1
-          else:
-            self.log.warning(
-                "Tx confirmation of unknown message ID received: %s", event)
-            num_unexpected_ids = num_unexpected_ids + 1
-      except queue.Empty:
-        self.log.warning("[%s] Timed out waiting for any MESSAGE_SEND* event - "
-                         "assuming the rest are not coming", dut.pretty_name)
-        break
+                    id = event["data"][aconsts.SESSION_CB_KEY_MESSAGE_ID]
+                    if id in messages_by_id:
+                        msg = messages_by_id[id]
+                        if is_tx_ok:
+                            messages_by_msg[msg][
+                                KEY_TX_OK_COUNT] = messages_by_msg[msg][KEY_TX_OK_COUNT] + 1
+                            if messages_by_msg[msg][KEY_TX_OK_COUNT] == 1:
+                                num_ok_tx_confirmations = num_ok_tx_confirmations + 1
+                        else:
+                            messages_by_msg[msg][KEY_TX_FAIL_COUNT] = (
+                                messages_by_msg[msg][KEY_TX_FAIL_COUNT] + 1)
+                            if messages_by_msg[msg][KEY_TX_FAIL_COUNT] == 1:
+                                num_fail_tx_confirmations = num_fail_tx_confirmations + 1
+                    else:
+                        self.log.warning(
+                            "Tx confirmation of unknown message ID received: %s",
+                            event)
+                        num_unexpected_ids = num_unexpected_ids + 1
+            except queue.Empty:
+                self.log.warning(
+                    "[%s] Timed out waiting for any MESSAGE_SEND* event - "
+                    "assuming the rest are not coming", dut.pretty_name)
+                break
 
-    return (num_ok_tx_confirmations, num_fail_tx_confirmations,
-            num_unexpected_ids)
+        return (num_ok_tx_confirmations, num_fail_tx_confirmations,
+                num_unexpected_ids)
 
-  def wait_for_rx_events(self, dut, num_msgs, messages_by_msg):
-    """Wait for messages to be received and update data structures
+    def wait_for_rx_events(self, dut, num_msgs, messages_by_msg):
+        """Wait for messages to be received and update data structures
 
     Args:
       dut: device under test
       num_msgs: number of expected messages to receive
       messages_by_msg: {text -> {id, tx_ok_count, tx_fail_count, rx_count}}
     """
-    num_rx_msgs = 0
-    while num_rx_msgs < num_msgs:
-      try:
-        event = dut.ed.pop_event(aconsts.SESSION_CB_ON_MESSAGE_RECEIVED,
-                                 autils.EVENT_TIMEOUT)
-        msg = event["data"][aconsts.SESSION_CB_KEY_MESSAGE_AS_STRING]
-        if msg not in messages_by_msg:
-          messages_by_msg[msg] = {}
-          messages_by_msg[msg][KEY_ID] = -1
-          messages_by_msg[msg][KEY_TX_OK_COUNT] = 0
-          messages_by_msg[msg][KEY_TX_FAIL_COUNT] = 0
-          messages_by_msg[msg][KEY_RX_COUNT] = 1
+        num_rx_msgs = 0
+        while num_rx_msgs < num_msgs:
+            try:
+                event = dut.ed.pop_event(
+                    aconsts.SESSION_CB_ON_MESSAGE_RECEIVED,
+                    autils.EVENT_TIMEOUT)
+                msg = event["data"][aconsts.SESSION_CB_KEY_MESSAGE_AS_STRING]
+                if msg not in messages_by_msg:
+                    messages_by_msg[msg] = {}
+                    messages_by_msg[msg][KEY_ID] = -1
+                    messages_by_msg[msg][KEY_TX_OK_COUNT] = 0
+                    messages_by_msg[msg][KEY_TX_FAIL_COUNT] = 0
+                    messages_by_msg[msg][KEY_RX_COUNT] = 1
 
-        messages_by_msg[msg][
-            KEY_RX_COUNT] = messages_by_msg[msg][KEY_RX_COUNT] + 1
-        if messages_by_msg[msg][KEY_RX_COUNT] == 1:
-          num_rx_msgs = num_rx_msgs + 1
-      except queue.Empty:
-        self.log.warning(
-            "[%s] Timed out waiting for ON_MESSAGE_RECEIVED event - "
-            "assuming the rest are not coming", dut.pretty_name)
-        break
+                messages_by_msg[msg][
+                    KEY_RX_COUNT] = messages_by_msg[msg][KEY_RX_COUNT] + 1
+                if messages_by_msg[msg][KEY_RX_COUNT] == 1:
+                    num_rx_msgs = num_rx_msgs + 1
+            except queue.Empty:
+                self.log.warning(
+                    "[%s] Timed out waiting for ON_MESSAGE_RECEIVED event - "
+                    "assuming the rest are not coming", dut.pretty_name)
+                break
 
-  def analyze_results(self, results, messages_by_msg):
-    """Analyze the results of the stress message test and add to the results
+    def analyze_results(self, results, messages_by_msg):
+        """Analyze the results of the stress message test and add to the results
     dictionary
 
     Args:
       results: result dictionary into which to add data
       messages_by_msg: {text -> {id, tx_ok_count, tx_fail_count, rx_count}}
     """
-    results["raw_data"] = messages_by_msg
-    results["tx_count_success"] = 0
-    results["tx_count_duplicate_success"] = 0
-    results["tx_count_fail"] = 0
-    results["tx_count_duplicate_fail"] = 0
-    results["tx_count_neither"] = 0
-    results["tx_count_tx_ok_but_no_rx"] = 0
-    results["rx_count"] = 0
-    results["rx_count_duplicate"] = 0
-    results["rx_count_no_ok_tx_indication"] = 0
-    results["rx_count_fail_tx_indication"] = 0
-    results["rx_count_no_tx_message"] = 0
+        results["raw_data"] = messages_by_msg
+        results["tx_count_success"] = 0
+        results["tx_count_duplicate_success"] = 0
+        results["tx_count_fail"] = 0
+        results["tx_count_duplicate_fail"] = 0
+        results["tx_count_neither"] = 0
+        results["tx_count_tx_ok_but_no_rx"] = 0
+        results["rx_count"] = 0
+        results["rx_count_duplicate"] = 0
+        results["rx_count_no_ok_tx_indication"] = 0
+        results["rx_count_fail_tx_indication"] = 0
+        results["rx_count_no_tx_message"] = 0
 
-    for msg, data in messages_by_msg.items():
-      if data[KEY_TX_OK_COUNT] > 0:
-        results["tx_count_success"] = results["tx_count_success"] + 1
-      if data[KEY_TX_OK_COUNT] > 1:
-        results["tx_count_duplicate_success"] = (
-            results["tx_count_duplicate_success"] + 1)
-      if data[KEY_TX_FAIL_COUNT] > 0:
-        results["tx_count_fail"] = results["tx_count_fail"] + 1
-      if data[KEY_TX_FAIL_COUNT] > 1:
-        results[
-            "tx_count_duplicate_fail"] = results["tx_count_duplicate_fail"] + 1
-      if (data[KEY_TX_OK_COUNT] == 0 and data[KEY_TX_FAIL_COUNT] == 0 and
-          data[KEY_ID] != -1):
-        results["tx_count_neither"] = results["tx_count_neither"] + 1
-      if data[KEY_TX_OK_COUNT] > 0 and data[KEY_RX_COUNT] == 0:
-        results["tx_count_tx_ok_but_no_rx"] = (
-            results["tx_count_tx_ok_but_no_rx"] + 1)
-      if data[KEY_RX_COUNT] > 0:
-        results["rx_count"] = results["rx_count"] + 1
-      if data[KEY_RX_COUNT] > 1:
-        results["rx_count_duplicate"] = results["rx_count_duplicate"] + 1
-      if data[KEY_RX_COUNT] > 0 and data[KEY_TX_OK_COUNT] == 0:
-        results["rx_count_no_ok_tx_indication"] = (
-            results["rx_count_no_ok_tx_indication"] + 1)
-      if data[KEY_RX_COUNT] > 0 and data[KEY_TX_FAIL_COUNT] > 0:
-        results["rx_count_fail_tx_indication"] = (
-            results["rx_count_fail_tx_indication"] + 1)
-      if data[KEY_RX_COUNT] > 0 and data[KEY_ID] == -1:
-        results[
-            "rx_count_no_tx_message"] = results["rx_count_no_tx_message"] + 1
+        for msg, data in messages_by_msg.items():
+            if data[KEY_TX_OK_COUNT] > 0:
+                results["tx_count_success"] = results["tx_count_success"] + 1
+            if data[KEY_TX_OK_COUNT] > 1:
+                results["tx_count_duplicate_success"] = (
+                    results["tx_count_duplicate_success"] + 1)
+            if data[KEY_TX_FAIL_COUNT] > 0:
+                results["tx_count_fail"] = results["tx_count_fail"] + 1
+            if data[KEY_TX_FAIL_COUNT] > 1:
+                results[
+                    "tx_count_duplicate_fail"] = results["tx_count_duplicate_fail"] + 1
+            if (data[KEY_TX_OK_COUNT] == 0 and data[KEY_TX_FAIL_COUNT] == 0
+                    and data[KEY_ID] != -1):
+                results["tx_count_neither"] = results["tx_count_neither"] + 1
+            if data[KEY_TX_OK_COUNT] > 0 and data[KEY_RX_COUNT] == 0:
+                results["tx_count_tx_ok_but_no_rx"] = (
+                    results["tx_count_tx_ok_but_no_rx"] + 1)
+            if data[KEY_RX_COUNT] > 0:
+                results["rx_count"] = results["rx_count"] + 1
+            if data[KEY_RX_COUNT] > 1:
+                results[
+                    "rx_count_duplicate"] = results["rx_count_duplicate"] + 1
+            if data[KEY_RX_COUNT] > 0 and data[KEY_TX_OK_COUNT] == 0:
+                results["rx_count_no_ok_tx_indication"] = (
+                    results["rx_count_no_ok_tx_indication"] + 1)
+            if data[KEY_RX_COUNT] > 0 and data[KEY_TX_FAIL_COUNT] > 0:
+                results["rx_count_fail_tx_indication"] = (
+                    results["rx_count_fail_tx_indication"] + 1)
+            if data[KEY_RX_COUNT] > 0 and data[KEY_ID] == -1:
+                results[
+                    "rx_count_no_tx_message"] = results["rx_count_no_tx_message"] + 1
 
-  #######################################################################
+    #######################################################################
 
-  @test_tracker_info(uuid="e88c060f-4ca7-41c1-935a-d3d62878ec0b")
-  def test_stress_message(self):
-    """Stress test for bi-directional message transmission and reception."""
-    p_dut = self.android_devices[0]
-    s_dut = self.android_devices[1]
+    @test_tracker_info(uuid="e88c060f-4ca7-41c1-935a-d3d62878ec0b")
+    def test_stress_message(self):
+        """Stress test for bi-directional message transmission and reception."""
+        p_dut = self.android_devices[0]
+        s_dut = self.android_devices[1]
 
-    # Start up a discovery session
-    discovery_data = autils.create_discovery_pair(
-        p_dut,
-        s_dut,
-        p_config=autils.create_discovery_config(
-            self.SERVICE_NAME, aconsts.PUBLISH_TYPE_UNSOLICITED),
-        s_config=autils.create_discovery_config(self.SERVICE_NAME,
-                                                aconsts.SUBSCRIBE_TYPE_PASSIVE),
-        device_startup_offset=self.device_startup_offset,
-        msg_id=self.get_next_msg_id())
-    p_id = discovery_data[0]
-    s_id = discovery_data[1]
-    p_disc_id = discovery_data[2]
-    s_disc_id = discovery_data[3]
-    peer_id_on_sub = discovery_data[4]
-    peer_id_on_pub = discovery_data[5]
+        # Start up a discovery session
+        discovery_data = autils.create_discovery_pair(
+            p_dut,
+            s_dut,
+            p_config=autils.create_discovery_config(
+                self.SERVICE_NAME, aconsts.PUBLISH_TYPE_UNSOLICITED),
+            s_config=autils.create_discovery_config(
+                self.SERVICE_NAME, aconsts.SUBSCRIBE_TYPE_PASSIVE),
+            device_startup_offset=self.device_startup_offset,
+            msg_id=self.get_next_msg_id())
+        p_id = discovery_data[0]
+        s_id = discovery_data[1]
+        p_disc_id = discovery_data[2]
+        s_disc_id = discovery_data[3]
+        peer_id_on_sub = discovery_data[4]
+        peer_id_on_pub = discovery_data[5]
 
-    # Store information on Tx & Rx messages
-    messages_by_msg = {}  # keyed by message text
-    # {text -> {id, tx_ok_count, tx_fail_count, rx_count}}
-    messages_by_id = {}  # keyed by message ID {id -> text}
+        # Store information on Tx & Rx messages
+        messages_by_msg = {}  # keyed by message text
+        # {text -> {id, tx_ok_count, tx_fail_count, rx_count}}
+        messages_by_id = {}  # keyed by message ID {id -> text}
 
-    # send all messages at once (one in each direction)
-    for i in range(self.NUM_ITERATIONS):
-      msg_p2s = "Message Publisher -> Subscriber #%d" % i
-      next_msg_id = self.get_next_msg_id()
-      self.init_info(msg_p2s, next_msg_id, messages_by_msg, messages_by_id)
-      p_dut.droid.wifiAwareSendMessage(p_disc_id, peer_id_on_pub, next_msg_id,
-                                       msg_p2s, 0)
+        # send all messages at once (one in each direction)
+        for i in range(self.NUM_ITERATIONS):
+            msg_p2s = "Message Publisher -> Subscriber #%d" % i
+            next_msg_id = self.get_next_msg_id()
+            self.init_info(msg_p2s, next_msg_id, messages_by_msg,
+                           messages_by_id)
+            p_dut.droid.wifiAwareSendMessage(p_disc_id, peer_id_on_pub,
+                                             next_msg_id, msg_p2s, 0)
 
-      msg_s2p = "Message Subscriber -> Publisher #%d" % i
-      next_msg_id = self.get_next_msg_id()
-      self.init_info(msg_s2p, next_msg_id, messages_by_msg, messages_by_id)
-      s_dut.droid.wifiAwareSendMessage(s_disc_id, peer_id_on_sub, next_msg_id,
-                                       msg_s2p, 0)
+            msg_s2p = "Message Subscriber -> Publisher #%d" % i
+            next_msg_id = self.get_next_msg_id()
+            self.init_info(msg_s2p, next_msg_id, messages_by_msg,
+                           messages_by_id)
+            s_dut.droid.wifiAwareSendMessage(s_disc_id, peer_id_on_sub,
+                                             next_msg_id, msg_s2p, 0)
 
-    # wait for message tx confirmation
-    (p_tx_ok_count, p_tx_fail_count, p_tx_unknown_id) = self.wait_for_tx_events(
-        p_dut, self.NUM_ITERATIONS, messages_by_msg, messages_by_id)
-    (s_tx_ok_count, s_tx_fail_count, s_tx_unknown_id) = self.wait_for_tx_events(
-        s_dut, self.NUM_ITERATIONS, messages_by_msg, messages_by_id)
-    self.log.info("Transmission done: pub=%d, sub=%d transmitted successfully",
-                  p_tx_ok_count, s_tx_ok_count)
+        # wait for message tx confirmation
+        (p_tx_ok_count,
+         p_tx_fail_count, p_tx_unknown_id) = self.wait_for_tx_events(
+             p_dut, self.NUM_ITERATIONS, messages_by_msg, messages_by_id)
+        (s_tx_ok_count,
+         s_tx_fail_count, s_tx_unknown_id) = self.wait_for_tx_events(
+             s_dut, self.NUM_ITERATIONS, messages_by_msg, messages_by_id)
+        self.log.info(
+            "Transmission done: pub=%d, sub=%d transmitted successfully",
+            p_tx_ok_count, s_tx_ok_count)
 
-    # wait for message rx confirmation (giving it the total number of messages
-    # transmitted rather than just those transmitted correctly since sometimes
-    # the Tx doesn't get that information correctly. I.e. a message the Tx
-    # thought was not transmitted correctly is actually received - missing ACK?
-    # bug?)
-    self.wait_for_rx_events(p_dut, self.NUM_ITERATIONS, messages_by_msg)
-    self.wait_for_rx_events(s_dut, self.NUM_ITERATIONS, messages_by_msg)
+        # wait for message rx confirmation (giving it the total number of messages
+        # transmitted rather than just those transmitted correctly since sometimes
+        # the Tx doesn't get that information correctly. I.e. a message the Tx
+        # thought was not transmitted correctly is actually received - missing ACK?
+        # bug?)
+        self.wait_for_rx_events(p_dut, self.NUM_ITERATIONS, messages_by_msg)
+        self.wait_for_rx_events(s_dut, self.NUM_ITERATIONS, messages_by_msg)
 
-    # analyze results
-    results = {}
-    results["tx_count"] = 2 * self.NUM_ITERATIONS
-    results["tx_unknown_ids"] = p_tx_unknown_id + s_tx_unknown_id
-    self.analyze_results(results, messages_by_msg)
+        # analyze results
+        results = {}
+        results["tx_count"] = 2 * self.NUM_ITERATIONS
+        results["tx_unknown_ids"] = p_tx_unknown_id + s_tx_unknown_id
+        self.analyze_results(results, messages_by_msg)
 
-    # clear errors
-    asserts.assert_equal(results["tx_unknown_ids"], 0, "Message ID corruption",
-                         results)
-    asserts.assert_equal(results["tx_count_neither"], 0,
-                         "Tx message with no success or fail indication",
-                         results)
-    asserts.assert_equal(results["tx_count_duplicate_fail"], 0,
-                         "Duplicate Tx fail messages", results)
-    asserts.assert_equal(results["tx_count_duplicate_success"], 0,
-                         "Duplicate Tx success messages", results)
-    asserts.assert_equal(results["rx_count_no_tx_message"], 0,
-                         "Rx message which wasn't sent - message corruption?",
-                         results)
-    asserts.assert_equal(results["tx_count_tx_ok_but_no_rx"], 0,
-                         "Tx got ACK but Rx didn't get message", results)
+        # clear errors
+        asserts.assert_equal(results["tx_unknown_ids"], 0,
+                             "Message ID corruption", results)
+        asserts.assert_equal(results["tx_count_neither"], 0,
+                             "Tx message with no success or fail indication",
+                             results)
+        asserts.assert_equal(results["tx_count_duplicate_fail"], 0,
+                             "Duplicate Tx fail messages", results)
+        asserts.assert_equal(results["tx_count_duplicate_success"], 0,
+                             "Duplicate Tx success messages", results)
+        asserts.assert_equal(
+            results["rx_count_no_tx_message"], 0,
+            "Rx message which wasn't sent - message corruption?", results)
+        asserts.assert_equal(results["tx_count_tx_ok_but_no_rx"], 0,
+                             "Tx got ACK but Rx didn't get message", results)
 
-    # possibly ok - but flag since most frequently a bug
-    asserts.assert_equal(results["rx_count_no_ok_tx_indication"], 0,
-                         "Message received but Tx didn't get ACK", results)
-    asserts.assert_equal(results["rx_count_fail_tx_indication"], 0,
-                         "Message received but Tx didn't get ACK", results)
+        # possibly ok - but flag since most frequently a bug
+        asserts.assert_equal(results["rx_count_no_ok_tx_indication"], 0,
+                             "Message received but Tx didn't get ACK", results)
+        asserts.assert_equal(results["rx_count_fail_tx_indication"], 0,
+                             "Message received but Tx didn't get ACK", results)
 
-    # permissible failures based on thresholds
-    asserts.assert_true(results["tx_count_fail"] <= (
-          self.MAX_TX_FAILURE_PERCENTAGE * self.NUM_ITERATIONS / 100),
-                        "Number of Tx failures exceeds threshold",
-                        extras=results)
-    asserts.assert_true(results["rx_count_duplicate"] <= (
-        self.MAX_DUPLICATE_RX_PERCENTAGE * self.NUM_ITERATIONS / 100),
-                        "Number of duplicate Rx exceeds threshold",
-                        extras=results)
+        # permissible failures based on thresholds
+        asserts.assert_true(
+            results["tx_count_fail"] <=
+            (self.MAX_TX_FAILURE_PERCENTAGE * self.NUM_ITERATIONS / 100),
+            "Number of Tx failures exceeds threshold",
+            extras=results)
+        asserts.assert_true(
+            results["rx_count_duplicate"] <=
+            (self.MAX_DUPLICATE_RX_PERCENTAGE * self.NUM_ITERATIONS / 100),
+            "Number of duplicate Rx exceeds threshold",
+            extras=results)
 
-    asserts.explicit_pass("test_stress_message done", extras=results)
\ No newline at end of file
+        asserts.explicit_pass("test_stress_message done", extras=results)
diff --git a/acts/tests/google/wifi/example_config.json b/acts/tests/google/wifi/example_config_iot.json
similarity index 100%
rename from acts/tests/google/wifi/example_config.json
rename to acts/tests/google/wifi/example_config_iot.json
diff --git a/acts/tests/google/wifi/example_config_sanity.json b/acts/tests/google/wifi/example_config_sanity.json
new file mode 100644
index 0000000..b23d3bf
--- /dev/null
+++ b/acts/tests/google/wifi/example_config_sanity.json
@@ -0,0 +1,207 @@
+{
+    "testbed": [
+        {
+            "name": "test_station_name",
+            "AndroidDevice": [
+                "<serial number 1>",
+                "<serial number 2 if necessary and 3 etc>"
+            ],
+            "AccessPoint": [
+                    { "ssh_config" :
+                                    {
+                                            "user" : "root",
+                                            "host" : "<ip 1, e.g. 192.168.1.2>"
+                                    }
+                    },
+                    { "ssh_config" :
+                                    {
+                                            "user" : "root",
+                                            "host" : "<ip 2 (if necessary) and ip 3 ...>"
+                                    }
+                    }
+            ],
+            "Attenuator": [
+                {
+                    "Address": "<attenuator ip address>",
+                    "InstrumentCount": 4,
+                    "Model": "<model, e.g. minicircuits>",
+                    "Paths": [
+                        "AP1-2G",
+                        "AP1-5G",
+                        "AP2-2G",
+                        "AP2-5G"
+                    ],
+                    "Port": 22
+                }
+            ],
+            "IPerfServer": [
+                5004
+            ],
+            "bssid_2g": {
+                "BSSID": "<bssid, e.g. 00:01:02:03:04:05>",
+                "high": "-10",
+                "low": "-85"
+            },
+            "bssid_5g": {
+                "BSSID": "<bssid>",
+                "high": "-10",
+                "low": "-85"
+            },
+            "bssid_dfs": {
+                "BSSID": "<bssid>",
+                "high": "-10",
+                "low": "-85"
+            },
+            "iperf_server_address": "100.107.126.31"
+        }
+    ],
+    "atten_val": {
+        "Ap1_2g": [
+            10,
+            95,
+            95,
+            95
+        ],
+        "Ap1_2gto5g": [
+            45,
+            10,
+            95,
+            95
+        ],
+        "Ap1_5gto2g": [
+            10,
+            80,
+            95,
+            95
+        ],
+        "Ap2_2g": [
+            75,
+            75,
+            10,
+            75
+        ],
+        "Ap2_2gto5g": [
+            75,
+            75,
+            75,
+            10
+        ],
+        "Ap2_5gto2g": [
+            75,
+            75,
+            10,
+            75
+        ],
+        "Back_from_blacklist": [
+            40,
+            95,
+            95
+        ],
+        "In_AP1_5gto2g": [
+            10,
+            75,
+            95,
+            95
+        ],
+        "In_Ap2_5gto2g": [
+            75,
+            75,
+            10,
+            75
+        ],
+        "In_blacklist": [
+            95,
+            95,
+            0
+        ],
+        "Swtich_AP1toAp2": [
+            70,
+            70,
+            2,
+            70
+        ],
+        "Swtich_AP2toAp1": [
+            10,
+            70,
+            75,
+            75
+        ],
+        "Swtich_to_blacklist": [
+            60,
+            90,
+            40
+        ]
+    },
+    "attenuator_id": 0,
+    "roaming_attn": {
+        "AP1_on_AP2_off": [
+            0,
+            0,
+            95,
+            95
+        ],
+        "AP1_off_AP2_on": [
+            95,
+            95,
+            0,
+            0
+        ],
+        "default": [
+            0,
+            0,
+            0,
+            0
+        ]
+    },
+    "attn_vals": {
+        "a_b_on": [
+            0,
+            0
+        ],
+        "a_on_b_off": [
+            0,
+            95
+        ],
+        "b_on_a_off": [
+            95,
+            0
+        ],
+        "default": [
+            0,
+            0
+        ]
+    },
+    "device_password": "hahahaha",
+    "eap_password": "password",
+    "fqdn": "red.com",
+    "max_bugreports": 5,
+    "other_network": {
+        "SSID": "wh_ap3_2g",
+        "password": "hahahaha"
+    },
+    "ping_addr": "https://www.google.com/robots.txt",
+    "pno_interval": 120,
+    "provider_friendly_name": "red",
+    "realm": "red.com",
+    "roam_interval": 60,
+    "run_extended_test": false,
+    "two_ap_testbed": true,
+    "aware_default_power_mode": "INTERACTIVE",
+    "stress_count": 100,
+    "stress_hours": 5,
+    "dbs_supported_models": ["<product name 1>", "<product name 2>"],
+    "lci_reference": [],
+    "lcr_reference": [],
+    "rtt_reference_distance_mm": 4600,
+    "stress_test_min_iteration_count": 100,
+    "stress_test_target_run_time_sec" : 30,
+    "energy_info_models": [
+        "<product name 1 (adb shell getprop ro.build.product)>",
+        "<product name 2>"
+    ],
+    "tdls_models": [
+        "<product name 1>",
+        "<product name 2>"
+    ]
+}
+
diff --git a/acts/tests/google/wifi/rtt/README.md b/acts/tests/google/wifi/rtt/README.md
index 639c3d8..1e435eb 100644
--- a/acts/tests/google/wifi/rtt/README.md
+++ b/acts/tests/google/wifi/rtt/README.md
@@ -54,3 +54,6 @@
 * **stress_test_min_iteration_count**, **stress_test_target_run_time_sec**: Parameters used to
 control the length and duration of the stress tests. The stress test runs for the specified number
 of iterations or for the specified duration - whichever is longer.
+* **dbs_supported_models**: A list of device models which support DBS. Used to determine whether
+RTT will run while a SoftAP (SAP) is enabled. The model name corresponds to the value returned by
+*android_device.model*.
diff --git a/acts/tests/google/wifi/rtt/config/wifi_rtt.json b/acts/tests/google/wifi/rtt/config/wifi_rtt.json
index 41f77dc..315beac 100644
--- a/acts/tests/google/wifi/rtt/config/wifi_rtt.json
+++ b/acts/tests/google/wifi/rtt/config/wifi_rtt.json
@@ -16,5 +16,6 @@
     "lcr_reference": [],
     "rtt_reference_distance_mm": 100,
     "stress_test_min_iteration_count": 100,
-    "stress_test_target_run_time_sec" : 30
+    "stress_test_target_run_time_sec" : 30,
+    "dbs_supported_models" : []
 }
diff --git a/acts/tests/google/wifi/rtt/functional/AwareDiscoveryWithRangingTest.py b/acts/tests/google/wifi/rtt/functional/AwareDiscoveryWithRangingTest.py
index f6d7c8d..9421ef3 100644
--- a/acts/tests/google/wifi/rtt/functional/AwareDiscoveryWithRangingTest.py
+++ b/acts/tests/google/wifi/rtt/functional/AwareDiscoveryWithRangingTest.py
@@ -29,41 +29,44 @@
 
 
 class AwareDiscoveryWithRangingTest(AwareBaseTest, RttBaseTest):
-  """Set of tests for Wi-Fi Aware discovery configured with ranging (RTT)."""
+    """Set of tests for Wi-Fi Aware discovery configured with ranging (RTT)."""
 
-  SERVICE_NAME = "GoogleTestServiceRRRRR"
+    SERVICE_NAME = "GoogleTestServiceRRRRR"
 
-  # Flag indicating whether the device has a limitation that does not allow it
-  # to execute Aware-based Ranging (whether direct or as part of discovery)
-  # whenever NDP is enabled.
-  RANGING_NDP_CONCURRENCY_LIMITATION = True
+    # Flag indicating whether the device has a limitation that does not allow it
+    # to execute Aware-based Ranging (whether direct or as part of discovery)
+    # whenever NDP is enabled.
+    RANGING_NDP_CONCURRENCY_LIMITATION = True
 
-  # Flag indicating whether the device has a limitation that does not allow it
-  # to execute Aware-based Ranging (whether direct or as part of discovery)
-  # for both Initiators and Responders. Only the first mode works.
-  RANGING_INITIATOR_RESPONDER_CONCURRENCY_LIMITATION = True
+    # Flag indicating whether the device has a limitation that does not allow it
+    # to execute Aware-based Ranging (whether direct or as part of discovery)
+    # for both Initiators and Responders. Only the first mode works.
+    RANGING_INITIATOR_RESPONDER_CONCURRENCY_LIMITATION = True
 
-  def __init__(self, controllers):
-    AwareBaseTest.__init__(self, controllers)
-    RttBaseTest.__init__(self, controllers)
+    def __init__(self, controllers):
+        AwareBaseTest.__init__(self, controllers)
+        RttBaseTest.__init__(self, controllers)
 
-  def setup_test(self):
-    """Manual setup here due to multiple inheritance: explicitly execute the
+    def setup_test(self):
+        """Manual setup here due to multiple inheritance: explicitly execute the
     setup method from both parents."""
-    AwareBaseTest.setup_test(self)
-    RttBaseTest.setup_test(self)
+        AwareBaseTest.setup_test(self)
+        RttBaseTest.setup_test(self)
 
-  def teardown_test(self):
-    """Manual teardown here due to multiple inheritance: explicitly execute the
+    def teardown_test(self):
+        """Manual teardown here due to multiple inheritance: explicitly execute the
     teardown method from both parents."""
-    AwareBaseTest.teardown_test(self)
-    RttBaseTest.teardown_test(self)
+        AwareBaseTest.teardown_test(self)
+        RttBaseTest.teardown_test(self)
 
-  #########################################################################
+    #########################################################################
 
-  def run_discovery(self, p_config, s_config, expect_discovery,
-      expect_range=False):
-    """Run discovery on the 2 input devices with the specified configurations.
+    def run_discovery(self,
+                      p_config,
+                      s_config,
+                      expect_discovery,
+                      expect_range=False):
+        """Run discovery on the 2 input devices with the specified configurations.
 
     Args:
       p_config, s_config: Publisher and Subscriber discovery configuration.
@@ -75,50 +78,59 @@
       p_dut, s_dut: Publisher/Subscribe DUT
       p_disc_id, s_disc_id: Publisher/Subscribe discovery session ID
     """
-    p_dut = self.android_devices[0]
-    p_dut.pretty_name = "Publisher"
-    s_dut = self.android_devices[1]
-    s_dut.pretty_name = "Subscriber"
+        p_dut = self.android_devices[0]
+        p_dut.pretty_name = "Publisher"
+        s_dut = self.android_devices[1]
+        s_dut.pretty_name = "Subscriber"
 
-    # Publisher+Subscriber: attach and wait for confirmation
-    p_id = p_dut.droid.wifiAwareAttach(False)
-    autils.wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
-    time.sleep(self.device_startup_offset)
-    s_id = s_dut.droid.wifiAwareAttach(False)
-    autils.wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
+        # Publisher+Subscriber: attach and wait for confirmation
+        p_id = p_dut.droid.wifiAwareAttach(False)
+        autils.wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
+        time.sleep(self.device_startup_offset)
+        s_id = s_dut.droid.wifiAwareAttach(False)
+        autils.wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
 
-    # Publisher: start publish and wait for confirmation
-    p_disc_id = p_dut.droid.wifiAwarePublish(p_id, p_config)
-    autils.wait_for_event(p_dut, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
+        # Publisher: start publish and wait for confirmation
+        p_disc_id = p_dut.droid.wifiAwarePublish(p_id, p_config)
+        autils.wait_for_event(p_dut, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
 
-    # Subscriber: start subscribe and wait for confirmation
-    s_disc_id = s_dut.droid.wifiAwareSubscribe(s_id, s_config)
-    autils.wait_for_event(s_dut, aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED)
+        # Subscriber: start subscribe and wait for confirmation
+        s_disc_id = s_dut.droid.wifiAwareSubscribe(s_id, s_config)
+        autils.wait_for_event(s_dut, aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED)
 
-    # Subscriber: wait or fail on service discovery
-    if expect_discovery:
-      event = autils.wait_for_event(s_dut,
-                                    aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
-      if expect_range:
-        asserts.assert_true(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
-                            "Discovery with ranging expected!")
-      else:
-        asserts.assert_false(
-          aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
-          "Discovery with ranging NOT expected!")
-    else:
-      autils.fail_on_event(s_dut, aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
+        # Subscriber: wait or fail on service discovery
+        if expect_discovery:
+            event = autils.wait_for_event(
+                s_dut, aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
+            if expect_range:
+                asserts.assert_true(
+                    aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+                    "Discovery with ranging expected!")
+            else:
+                asserts.assert_false(
+                    aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+                    "Discovery with ranging NOT expected!")
+        else:
+            autils.fail_on_event(s_dut,
+                                 aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
 
-    # (single) sleep for timeout period and then verify that no further events
-    time.sleep(autils.EVENT_TIMEOUT)
-    autils.verify_no_more_events(p_dut, timeout=0)
-    autils.verify_no_more_events(s_dut, timeout=0)
+        # (single) sleep for timeout period and then verify that no further events
+        time.sleep(autils.EVENT_TIMEOUT)
+        autils.verify_no_more_events(p_dut, timeout=0)
+        autils.verify_no_more_events(s_dut, timeout=0)
 
-    return p_dut, s_dut, p_disc_id, s_disc_id
+        return p_dut, s_dut, p_disc_id, s_disc_id
 
-  def run_discovery_update(self, p_dut, s_dut, p_disc_id, s_disc_id, p_config,
-      s_config, expect_discovery, expect_range=False):
-    """Run discovery on the 2 input devices with the specified update
+    def run_discovery_update(self,
+                             p_dut,
+                             s_dut,
+                             p_disc_id,
+                             s_disc_id,
+                             p_config,
+                             s_config,
+                             expect_discovery,
+                             expect_range=False):
+        """Run discovery on the 2 input devices with the specified update
     configurations. I.e. update the existing discovery sessions with the
     configurations.
 
@@ -132,39 +144,43 @@
                     Only relevant if expect_discovery is True.
     """
 
-    # try to perform reconfiguration at same time (and wait once for all
-    # confirmations)
-    if p_config is not None:
-      p_dut.droid.wifiAwareUpdatePublish(p_disc_id, p_config)
-    if s_config is not None:
-      s_dut.droid.wifiAwareUpdateSubscribe(s_disc_id, s_config)
+        # try to perform reconfiguration at same time (and wait once for all
+        # confirmations)
+        if p_config is not None:
+            p_dut.droid.wifiAwareUpdatePublish(p_disc_id, p_config)
+        if s_config is not None:
+            s_dut.droid.wifiAwareUpdateSubscribe(s_disc_id, s_config)
 
-    if p_config is not None:
-      autils.wait_for_event(p_dut, aconsts.SESSION_CB_ON_SESSION_CONFIG_UPDATED)
-    if s_config is not None:
-      autils.wait_for_event(s_dut, aconsts.SESSION_CB_ON_SESSION_CONFIG_UPDATED)
+        if p_config is not None:
+            autils.wait_for_event(p_dut,
+                                  aconsts.SESSION_CB_ON_SESSION_CONFIG_UPDATED)
+        if s_config is not None:
+            autils.wait_for_event(s_dut,
+                                  aconsts.SESSION_CB_ON_SESSION_CONFIG_UPDATED)
 
-    # Subscriber: wait or fail on service discovery
-    if expect_discovery:
-      event = autils.wait_for_event(s_dut,
-                                    aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
-      if expect_range:
-        asserts.assert_true(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
-                            "Discovery with ranging expected!")
-      else:
-        asserts.assert_false(
-            aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
-            "Discovery with ranging NOT expected!")
-    else:
-      autils.fail_on_event(s_dut, aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
+        # Subscriber: wait or fail on service discovery
+        if expect_discovery:
+            event = autils.wait_for_event(
+                s_dut, aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
+            if expect_range:
+                asserts.assert_true(
+                    aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+                    "Discovery with ranging expected!")
+            else:
+                asserts.assert_false(
+                    aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+                    "Discovery with ranging NOT expected!")
+        else:
+            autils.fail_on_event(s_dut,
+                                 aconsts.SESSION_CB_ON_SERVICE_DISCOVERED)
 
-    # (single) sleep for timeout period and then verify that no further events
-    time.sleep(autils.EVENT_TIMEOUT)
-    autils.verify_no_more_events(p_dut, timeout=0)
-    autils.verify_no_more_events(s_dut, timeout=0)
+        # (single) sleep for timeout period and then verify that no further events
+        time.sleep(autils.EVENT_TIMEOUT)
+        autils.verify_no_more_events(p_dut, timeout=0)
+        autils.verify_no_more_events(s_dut, timeout=0)
 
-  def run_discovery_prange_sminmax_outofrange(self, is_unsolicited_passive):
-    """Run discovery with ranging:
+    def run_discovery_prange_sminmax_outofrange(self, is_unsolicited_passive):
+        """Run discovery with ranging:
     - Publisher enables ranging
     - Subscriber enables ranging with min/max such that out of range (min=large,
       max=large+1)
@@ -178,88 +194,92 @@
                               Solicited/Active.
     Returns: the return arguments of the run_discovery.
     """
-    pub_type = (aconsts.PUBLISH_TYPE_UNSOLICITED if is_unsolicited_passive
-                else aconsts.PUBLISH_TYPE_SOLICITED)
-    sub_type = (aconsts.SUBSCRIBE_TYPE_PASSIVE if is_unsolicited_passive
-                else aconsts.SUBSCRIBE_TYPE_ACTIVE)
-    return self.run_discovery(
-        p_config=autils.add_ranging_to_pub(
-            autils.create_discovery_config(self.SERVICE_NAME, pub_type,
-                                           ssi=self.getname(2)),
-            enable_ranging=True),
-        s_config=autils.add_ranging_to_sub(
-            autils.create_discovery_config(self.SERVICE_NAME, sub_type,
-                                           ssi=self.getname(2)),
-            min_distance_mm=1000000,
-            max_distance_mm=1000001),
-        expect_discovery=False)
+        pub_type = (aconsts.PUBLISH_TYPE_UNSOLICITED if is_unsolicited_passive
+                    else aconsts.PUBLISH_TYPE_SOLICITED)
+        sub_type = (aconsts.SUBSCRIBE_TYPE_PASSIVE if is_unsolicited_passive
+                    else aconsts.SUBSCRIBE_TYPE_ACTIVE)
+        return self.run_discovery(
+            p_config=autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME, pub_type, ssi=self.getname(2)),
+                enable_ranging=True),
+            s_config=autils.add_ranging_to_sub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME, sub_type, ssi=self.getname(2)),
+                min_distance_mm=1000000,
+                max_distance_mm=1000001),
+            expect_discovery=False)
 
-  def getname(self, level=1):
-    """Python magic to return the name of the *calling* function.
+    def getname(self, level=1):
+        """Python magic to return the name of the *calling* function.
 
     Args:
       level: How many levels up to go for the method name. Default = calling
              method.
     """
-    return sys._getframe(level).f_code.co_name
+        return sys._getframe(level).f_code.co_name
 
-  #########################################################################
-  # Run discovery with ranging configuration.
-  #
-  # Names: test_ranged_discovery_<ptype>_<stype>_<p_range>_<s_range>_<ref_dist>
-  #
-  # where:
-  # <ptype>_<stype>: unsolicited_passive or solicited_active
-  # <p_range>: prange or pnorange
-  # <s_range>: smin or smax or sminmax or snorange
-  # <ref_distance>: inrange or outoforange
-  #########################################################################
+    #########################################################################
+    # Run discovery with ranging configuration.
+    #
+    # Names: test_ranged_discovery_<ptype>_<stype>_<p_range>_<s_range>_<ref_dist>
+    #
+    # where:
+    # <ptype>_<stype>: unsolicited_passive or solicited_active
+    # <p_range>: prange or pnorange
+    # <s_range>: smin or smax or sminmax or snorange
+    # <ref_distance>: inrange or outofrange
+    #########################################################################
 
-  @test_tracker_info(uuid="3a216e9a-7a57-4741-89c0-84456975e1ac")
-  def test_ranged_discovery_unsolicited_passive_prange_snorange(self):
-    """Verify discovery with ranging:
+    @test_tracker_info(uuid="3a216e9a-7a57-4741-89c0-84456975e1ac")
+    def test_ranged_discovery_unsolicited_passive_prange_snorange(self):
+        """Verify discovery with ranging:
     - Unsolicited Publish/Passive Subscribe
     - Publisher enables ranging
     - Subscriber disables ranging
 
     Expect: normal discovery (as if no ranging performed) - no distance
     """
-    self.run_discovery(
-        p_config=autils.add_ranging_to_pub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.PUBLISH_TYPE_UNSOLICITED,
-                                           ssi=self.getname()),
-            enable_ranging=True),
-        s_config=autils.create_discovery_config(self.SERVICE_NAME,
-                                                aconsts.SUBSCRIBE_TYPE_PASSIVE,
-                                                ssi=self.getname()),
-        expect_discovery=True,
-        expect_range=False)
+        self.run_discovery(
+            p_config=autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.PUBLISH_TYPE_UNSOLICITED,
+                    ssi=self.getname()),
+                enable_ranging=True),
+            s_config=autils.create_discovery_config(
+                self.SERVICE_NAME,
+                aconsts.SUBSCRIBE_TYPE_PASSIVE,
+                ssi=self.getname()),
+            expect_discovery=True,
+            expect_range=False)
 
-  @test_tracker_info(uuid="859a321e-18e2-437b-aa7a-2a45a42ee737")
-  def test_ranged_discovery_solicited_active_prange_snorange(self):
-    """Verify discovery with ranging:
+    @test_tracker_info(uuid="859a321e-18e2-437b-aa7a-2a45a42ee737")
+    def test_ranged_discovery_solicited_active_prange_snorange(self):
+        """Verify discovery with ranging:
     - Solicited Publish/Active Subscribe
     - Publisher enables ranging
     - Subscriber disables ranging
 
     Expect: normal discovery (as if no ranging performed) - no distance
     """
-    self.run_discovery(
-        p_config=autils.add_ranging_to_pub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.PUBLISH_TYPE_SOLICITED,
-                                           ssi=self.getname()),
-            enable_ranging=True),
-        s_config=autils.create_discovery_config(self.SERVICE_NAME,
-                                                aconsts.SUBSCRIBE_TYPE_ACTIVE,
-                                                ssi=self.getname()),
-        expect_discovery=True,
-        expect_range=False)
+        self.run_discovery(
+            p_config=autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.PUBLISH_TYPE_SOLICITED,
+                    ssi=self.getname()),
+                enable_ranging=True),
+            s_config=autils.create_discovery_config(
+                self.SERVICE_NAME,
+                aconsts.SUBSCRIBE_TYPE_ACTIVE,
+                ssi=self.getname()),
+            expect_discovery=True,
+            expect_range=False)
 
-  @test_tracker_info(uuid="12a4f899-4f70-4641-8f3c-351004669b71")
-  def test_ranged_discovery_unsolicited_passive_pnorange_smax_inrange(self):
-    """Verify discovery with ranging:
+    @test_tracker_info(uuid="12a4f899-4f70-4641-8f3c-351004669b71")
+    def test_ranged_discovery_unsolicited_passive_pnorange_smax_inrange(self):
+        """Verify discovery with ranging:
     - Unsolicited Publish/Passive Subscribe
     - Publisher disables ranging
     - Subscriber enables ranging with max such that always within range (large
@@ -267,24 +287,26 @@
 
     Expect: normal discovery (as if no ranging performed) - no distance
     """
-    self.run_discovery(
-        p_config=autils.add_ranging_to_pub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.PUBLISH_TYPE_UNSOLICITED,
-                                           ssi=self.getname()),
-            enable_ranging=False),
-        s_config=autils.add_ranging_to_sub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.SUBSCRIBE_TYPE_PASSIVE,
-                                           ssi=self.getname()),
-            min_distance_mm=None,
-            max_distance_mm=1000000),
-        expect_discovery=True,
-        expect_range=False)
+        self.run_discovery(
+            p_config=autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.PUBLISH_TYPE_UNSOLICITED,
+                    ssi=self.getname()),
+                enable_ranging=False),
+            s_config=autils.add_ranging_to_sub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.SUBSCRIBE_TYPE_PASSIVE,
+                    ssi=self.getname()),
+                min_distance_mm=None,
+                max_distance_mm=1000000),
+            expect_discovery=True,
+            expect_range=False)
 
-  @test_tracker_info(uuid="b7f90793-113d-4355-be20-856d92ac939f")
-  def test_ranged_discovery_solicited_active_pnorange_smax_inrange(self):
-    """Verify discovery with ranging:
+    @test_tracker_info(uuid="b7f90793-113d-4355-be20-856d92ac939f")
+    def test_ranged_discovery_solicited_active_pnorange_smax_inrange(self):
+        """Verify discovery with ranging:
     - Solicited Publish/Active Subscribe
     - Publisher disables ranging
     - Subscriber enables ranging with max such that always within range (large
@@ -292,24 +314,27 @@
 
     Expect: normal discovery (as if no ranging performed) - no distance
     """
-    self.run_discovery(
-        p_config=autils.add_ranging_to_pub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.PUBLISH_TYPE_SOLICITED,
-                                           ssi=self.getname()),
-            enable_ranging=False),
-        s_config=autils.add_ranging_to_sub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.SUBSCRIBE_TYPE_ACTIVE,
-                                           ssi=self.getname()),
-            min_distance_mm=None,
-            max_distance_mm=1000000),
-        expect_discovery=True,
-        expect_range=False)
+        self.run_discovery(
+            p_config=autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.PUBLISH_TYPE_SOLICITED,
+                    ssi=self.getname()),
+                enable_ranging=False),
+            s_config=autils.add_ranging_to_sub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.SUBSCRIBE_TYPE_ACTIVE,
+                    ssi=self.getname()),
+                min_distance_mm=None,
+                max_distance_mm=1000000),
+            expect_discovery=True,
+            expect_range=False)
 
-  @test_tracker_info(uuid="da3ab6df-58f9-44ae-b7be-8200d9e1bb76")
-  def test_ranged_discovery_unsolicited_passive_pnorange_smin_outofrange(self):
-    """Verify discovery with ranging:
+    @test_tracker_info(uuid="da3ab6df-58f9-44ae-b7be-8200d9e1bb76")
+    def test_ranged_discovery_unsolicited_passive_pnorange_smin_outofrange(
+            self):
+        """Verify discovery with ranging:
     - Unsolicited Publish/Passive Subscribe
     - Publisher disables ranging
     - Subscriber enables ranging with min such that always out of range (large
@@ -317,24 +342,26 @@
 
     Expect: normal discovery (as if no ranging performed) - no distance
     """
-    self.run_discovery(
-        p_config=autils.add_ranging_to_pub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.PUBLISH_TYPE_UNSOLICITED,
-                                           ssi=self.getname()),
-            enable_ranging=False),
-        s_config=autils.add_ranging_to_sub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.SUBSCRIBE_TYPE_PASSIVE,
-                                           ssi=self.getname()),
-            min_distance_mm=1000000,
-            max_distance_mm=None),
-        expect_discovery=True,
-        expect_range=False)
+        self.run_discovery(
+            p_config=autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.PUBLISH_TYPE_UNSOLICITED,
+                    ssi=self.getname()),
+                enable_ranging=False),
+            s_config=autils.add_ranging_to_sub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.SUBSCRIBE_TYPE_PASSIVE,
+                    ssi=self.getname()),
+                min_distance_mm=1000000,
+                max_distance_mm=None),
+            expect_discovery=True,
+            expect_range=False)
 
-  @test_tracker_info(uuid="275e0806-f266-4fa6-9ca0-1cfd7b65a6ca")
-  def test_ranged_discovery_solicited_active_pnorange_smin_outofrange(self):
-    """Verify discovery with ranging:
+    @test_tracker_info(uuid="275e0806-f266-4fa6-9ca0-1cfd7b65a6ca")
+    def test_ranged_discovery_solicited_active_pnorange_smin_outofrange(self):
+        """Verify discovery with ranging:
     - Solicited Publish/Active Subscribe
     - Publisher disables ranging
     - Subscriber enables ranging with min such that always out of range (large
@@ -342,72 +369,78 @@
 
     Expect: normal discovery (as if no ranging performed) - no distance
     """
-    self.run_discovery(
-        p_config=autils.add_ranging_to_pub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.PUBLISH_TYPE_SOLICITED,
-                                           ssi=self.getname()),
-            enable_ranging=False),
-        s_config=autils.add_ranging_to_sub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.SUBSCRIBE_TYPE_ACTIVE,
-                                           ssi=self.getname()),
-            min_distance_mm=1000000,
-            max_distance_mm=None),
-        expect_discovery=True,
-        expect_range=False)
+        self.run_discovery(
+            p_config=autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.PUBLISH_TYPE_SOLICITED,
+                    ssi=self.getname()),
+                enable_ranging=False),
+            s_config=autils.add_ranging_to_sub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.SUBSCRIBE_TYPE_ACTIVE,
+                    ssi=self.getname()),
+                min_distance_mm=1000000,
+                max_distance_mm=None),
+            expect_discovery=True,
+            expect_range=False)
 
-  @test_tracker_info(uuid="8cd0aa1e-6866-4a5d-a550-f25483eebea1")
-  def test_ranged_discovery_unsolicited_passive_prange_smin_inrange(self):
-    """Verify discovery with ranging:
+    @test_tracker_info(uuid="8cd0aa1e-6866-4a5d-a550-f25483eebea1")
+    def test_ranged_discovery_unsolicited_passive_prange_smin_inrange(self):
+        """Verify discovery with ranging:
     - Unsolicited Publish/Passive Subscribe
     - Publisher enables ranging
     - Subscriber enables ranging with min such that in range (min=0)
 
     Expect: discovery with distance
     """
-    self.run_discovery(
-        p_config=autils.add_ranging_to_pub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.PUBLISH_TYPE_UNSOLICITED,
-                                           ssi=self.getname()),
-            enable_ranging=True),
-        s_config=autils.add_ranging_to_sub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.SUBSCRIBE_TYPE_PASSIVE,
-                                           ssi=self.getname()),
-            min_distance_mm=0,
-            max_distance_mm=None),
-        expect_discovery=True,
-        expect_range=True)
+        self.run_discovery(
+            p_config=autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.PUBLISH_TYPE_UNSOLICITED,
+                    ssi=self.getname()),
+                enable_ranging=True),
+            s_config=autils.add_ranging_to_sub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.SUBSCRIBE_TYPE_PASSIVE,
+                    ssi=self.getname()),
+                min_distance_mm=0,
+                max_distance_mm=None),
+            expect_discovery=True,
+            expect_range=True)
 
-  @test_tracker_info(uuid="97c22c54-669b-4f7a-bf51-2f484e5f3e74")
-  def test_ranged_discovery_unsolicited_passive_prange_smax_inrange(self):
-    """Verify discovery with ranging:
+    @test_tracker_info(uuid="97c22c54-669b-4f7a-bf51-2f484e5f3e74")
+    def test_ranged_discovery_unsolicited_passive_prange_smax_inrange(self):
+        """Verify discovery with ranging:
     - Unsolicited Publish/Passive Subscribe
     - Publisher enables ranging
     - Subscriber enables ranging with max such that in range (max=large)
 
     Expect: discovery with distance
     """
-    self.run_discovery(
-        p_config=autils.add_ranging_to_pub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.PUBLISH_TYPE_UNSOLICITED,
-                                           ssi=self.getname()),
-            enable_ranging=True),
-        s_config=autils.add_ranging_to_sub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.SUBSCRIBE_TYPE_PASSIVE,
-                                           ssi=self.getname()),
-            min_distance_mm=None,
-            max_distance_mm=1000000),
-        expect_discovery=True,
-        expect_range=True)
+        self.run_discovery(
+            p_config=autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.PUBLISH_TYPE_UNSOLICITED,
+                    ssi=self.getname()),
+                enable_ranging=True),
+            s_config=autils.add_ranging_to_sub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.SUBSCRIBE_TYPE_PASSIVE,
+                    ssi=self.getname()),
+                min_distance_mm=None,
+                max_distance_mm=1000000),
+            expect_discovery=True,
+            expect_range=True)
 
-  @test_tracker_info(uuid="616673d7-9d0b-43de-a378-e5e949b51b32")
-  def test_ranged_discovery_unsolicited_passive_prange_sminmax_inrange(self):
-    """Verify discovery with ranging:
+    @test_tracker_info(uuid="616673d7-9d0b-43de-a378-e5e949b51b32")
+    def test_ranged_discovery_unsolicited_passive_prange_sminmax_inrange(self):
+        """Verify discovery with ranging:
     - Unsolicited Publish/Passive Subscribe
     - Publisher enables ranging
     - Subscriber enables ranging with min/max such that in range (min=0,
@@ -415,72 +448,78 @@
 
     Expect: discovery with distance
     """
-    self.run_discovery(
-        p_config=autils.add_ranging_to_pub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.PUBLISH_TYPE_UNSOLICITED,
-                                           ssi=self.getname()),
-            enable_ranging=True),
-        s_config=autils.add_ranging_to_sub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.SUBSCRIBE_TYPE_PASSIVE,
-                                           ssi=self.getname()),
-            min_distance_mm=0,
-            max_distance_mm=1000000),
-        expect_discovery=True,
-        expect_range=True)
+        self.run_discovery(
+            p_config=autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.PUBLISH_TYPE_UNSOLICITED,
+                    ssi=self.getname()),
+                enable_ranging=True),
+            s_config=autils.add_ranging_to_sub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.SUBSCRIBE_TYPE_PASSIVE,
+                    ssi=self.getname()),
+                min_distance_mm=0,
+                max_distance_mm=1000000),
+            expect_discovery=True,
+            expect_range=True)
 
-  @test_tracker_info(uuid="2bf84912-dcad-4a8f-971f-e445a07f05ce")
-  def test_ranged_discovery_solicited_active_prange_smin_inrange(self):
-    """Verify discovery with ranging:
+    @test_tracker_info(uuid="2bf84912-dcad-4a8f-971f-e445a07f05ce")
+    def test_ranged_discovery_solicited_active_prange_smin_inrange(self):
+        """Verify discovery with ranging:
     - Solicited Publish/Active Subscribe
     - Publisher enables ranging
     - Subscriber enables ranging with min such that in range (min=0)
 
     Expect: discovery with distance
     """
-    self.run_discovery(
-        p_config=autils.add_ranging_to_pub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.PUBLISH_TYPE_SOLICITED,
-                                           ssi=self.getname()),
-            enable_ranging=True),
-        s_config=autils.add_ranging_to_sub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.SUBSCRIBE_TYPE_ACTIVE,
-                                           ssi=self.getname()),
-            min_distance_mm=0,
-            max_distance_mm=None),
-        expect_discovery=True,
-        expect_range=True)
+        self.run_discovery(
+            p_config=autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.PUBLISH_TYPE_SOLICITED,
+                    ssi=self.getname()),
+                enable_ranging=True),
+            s_config=autils.add_ranging_to_sub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.SUBSCRIBE_TYPE_ACTIVE,
+                    ssi=self.getname()),
+                min_distance_mm=0,
+                max_distance_mm=None),
+            expect_discovery=True,
+            expect_range=True)
 
-  @test_tracker_info(uuid="5cfd7961-9665-4742-a1b5-2d1fc97f9795")
-  def test_ranged_discovery_solicited_active_prange_smax_inrange(self):
-    """Verify discovery with ranging:
+    @test_tracker_info(uuid="5cfd7961-9665-4742-a1b5-2d1fc97f9795")
+    def test_ranged_discovery_solicited_active_prange_smax_inrange(self):
+        """Verify discovery with ranging:
     - Solicited Publish/Active Subscribe
     - Publisher enables ranging
     - Subscriber enables ranging with max such that in range (max=large)
 
     Expect: discovery with distance
     """
-    self.run_discovery(
-        p_config=autils.add_ranging_to_pub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.PUBLISH_TYPE_SOLICITED,
-                                           ssi=self.getname()),
-            enable_ranging=True),
-        s_config=autils.add_ranging_to_sub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.SUBSCRIBE_TYPE_ACTIVE,
-                                           ssi=self.getname()),
-            min_distance_mm=None,
-            max_distance_mm=1000000),
-        expect_discovery=True,
-        expect_range=True)
+        self.run_discovery(
+            p_config=autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.PUBLISH_TYPE_SOLICITED,
+                    ssi=self.getname()),
+                enable_ranging=True),
+            s_config=autils.add_ranging_to_sub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.SUBSCRIBE_TYPE_ACTIVE,
+                    ssi=self.getname()),
+                min_distance_mm=None,
+                max_distance_mm=1000000),
+            expect_discovery=True,
+            expect_range=True)
 
-  @test_tracker_info(uuid="5cf650ad-0b42-4b7d-9e05-d5f45fe0554d")
-  def test_ranged_discovery_solicited_active_prange_sminmax_inrange(self):
-    """Verify discovery with ranging:
+    @test_tracker_info(uuid="5cf650ad-0b42-4b7d-9e05-d5f45fe0554d")
+    def test_ranged_discovery_solicited_active_prange_sminmax_inrange(self):
+        """Verify discovery with ranging:
     - Solicited Publish/Active Subscribe
     - Publisher enables ranging
     - Subscriber enables ranging with min/max such that in range (min=0,
@@ -488,70 +527,77 @@
 
     Expect: discovery with distance
     """
-    self.run_discovery(
-        p_config=autils.add_ranging_to_pub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.PUBLISH_TYPE_SOLICITED,
-                                           ssi=self.getname()),
-            enable_ranging=True),
-        s_config=autils.add_ranging_to_sub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.SUBSCRIBE_TYPE_ACTIVE,
-                                           ssi=self.getname()),
-            min_distance_mm=0,
-            max_distance_mm=1000000),
-        expect_discovery=True,
-        expect_range=True)
+        self.run_discovery(
+            p_config=autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.PUBLISH_TYPE_SOLICITED,
+                    ssi=self.getname()),
+                enable_ranging=True),
+            s_config=autils.add_ranging_to_sub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.SUBSCRIBE_TYPE_ACTIVE,
+                    ssi=self.getname()),
+                min_distance_mm=0,
+                max_distance_mm=1000000),
+            expect_discovery=True,
+            expect_range=True)
 
-  @test_tracker_info(uuid="5277f418-ac35-43ce-9b30-3c895272898e")
-  def test_ranged_discovery_unsolicited_passive_prange_smin_outofrange(self):
-    """Verify discovery with ranging:
+    @test_tracker_info(uuid="5277f418-ac35-43ce-9b30-3c895272898e")
+    def test_ranged_discovery_unsolicited_passive_prange_smin_outofrange(self):
+        """Verify discovery with ranging:
     - Unsolicited Publish/Passive Subscribe
     - Publisher enables ranging
     - Subscriber enables ranging with min such that out of range (min=large)
 
     Expect: no discovery
     """
-    self.run_discovery(
-        p_config=autils.add_ranging_to_pub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.PUBLISH_TYPE_UNSOLICITED,
-                                           ssi=self.getname()),
-            enable_ranging=True),
-        s_config=autils.add_ranging_to_sub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.SUBSCRIBE_TYPE_PASSIVE,
-                                           ssi=self.getname()),
-            min_distance_mm=1000000,
-            max_distance_mm=None),
-        expect_discovery=False)
+        self.run_discovery(
+            p_config=autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.PUBLISH_TYPE_UNSOLICITED,
+                    ssi=self.getname()),
+                enable_ranging=True),
+            s_config=autils.add_ranging_to_sub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.SUBSCRIBE_TYPE_PASSIVE,
+                    ssi=self.getname()),
+                min_distance_mm=1000000,
+                max_distance_mm=None),
+            expect_discovery=False)
 
-  @test_tracker_info(uuid="8a7e6ab1-acf4-41a7-a5fb-8c164d593b5f")
-  def test_ranged_discovery_unsolicited_passive_prange_smax_outofrange(self):
-    """Verify discovery with ranging:
+    @test_tracker_info(uuid="8a7e6ab1-acf4-41a7-a5fb-8c164d593b5f")
+    def test_ranged_discovery_unsolicited_passive_prange_smax_outofrange(self):
+        """Verify discovery with ranging:
     - Unsolicited Publish/Passive Subscribe
     - Publisher enables ranging
     - Subscriber enables ranging with max such that in range (max=0)
 
     Expect: no discovery
     """
-    self.run_discovery(
-        p_config=autils.add_ranging_to_pub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.PUBLISH_TYPE_UNSOLICITED,
-                                           ssi=self.getname()),
-            enable_ranging=True),
-        s_config=autils.add_ranging_to_sub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.SUBSCRIBE_TYPE_PASSIVE,
-                                           ssi=self.getname()),
-            min_distance_mm=None,
-            max_distance_mm=0),
-        expect_discovery=False)
+        self.run_discovery(
+            p_config=autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.PUBLISH_TYPE_UNSOLICITED,
+                    ssi=self.getname()),
+                enable_ranging=True),
+            s_config=autils.add_ranging_to_sub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.SUBSCRIBE_TYPE_PASSIVE,
+                    ssi=self.getname()),
+                min_distance_mm=None,
+                max_distance_mm=0),
+            expect_discovery=False)
 
-  @test_tracker_info(uuid="b744f5f9-2641-4373-bf86-3752e2f9aace")
-  def test_ranged_discovery_unsolicited_passive_prange_sminmax_outofrange(self):
-    """Verify discovery with ranging:
+    @test_tracker_info(uuid="b744f5f9-2641-4373-bf86-3752e2f9aace")
+    def test_ranged_discovery_unsolicited_passive_prange_sminmax_outofrange(
+            self):
+        """Verify discovery with ranging:
     - Unsolicited Publish/Passive Subscribe
     - Publisher enables ranging
     - Subscriber enables ranging with min/max such that out of range (min=large,
@@ -559,69 +605,75 @@
 
     Expect: no discovery
     """
-    self.run_discovery(
-        p_config=autils.add_ranging_to_pub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.PUBLISH_TYPE_UNSOLICITED,
-                                           ssi=self.getname()),
-            enable_ranging=True),
-        s_config=autils.add_ranging_to_sub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.SUBSCRIBE_TYPE_PASSIVE,
-                                           ssi=self.getname()),
-            min_distance_mm=1000000,
-            max_distance_mm=1000001),
-        expect_discovery=False)
+        self.run_discovery(
+            p_config=autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.PUBLISH_TYPE_UNSOLICITED,
+                    ssi=self.getname()),
+                enable_ranging=True),
+            s_config=autils.add_ranging_to_sub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.SUBSCRIBE_TYPE_PASSIVE,
+                    ssi=self.getname()),
+                min_distance_mm=1000000,
+                max_distance_mm=1000001),
+            expect_discovery=False)
 
-  @test_tracker_info(uuid="d2e94199-b2e6-4fa5-a347-24594883c801")
-  def test_ranged_discovery_solicited_active_prange_smin_outofrange(self):
-    """Verify discovery with ranging:
+    @test_tracker_info(uuid="d2e94199-b2e6-4fa5-a347-24594883c801")
+    def test_ranged_discovery_solicited_active_prange_smin_outofrange(self):
+        """Verify discovery with ranging:
     - Solicited Publish/Active Subscribe
     - Publisher enables ranging
     - Subscriber enables ranging with min such that out of range (min=large)
 
     Expect: no discovery
     """
-    self.run_discovery(
-        p_config=autils.add_ranging_to_pub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.PUBLISH_TYPE_SOLICITED,
-                                           ssi=self.getname()),
-            enable_ranging=True),
-        s_config=autils.add_ranging_to_sub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.SUBSCRIBE_TYPE_ACTIVE,
-                                           ssi=self.getname()),
-            min_distance_mm=1000000,
-            max_distance_mm=None),
-        expect_discovery=False)
+        self.run_discovery(
+            p_config=autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.PUBLISH_TYPE_SOLICITED,
+                    ssi=self.getname()),
+                enable_ranging=True),
+            s_config=autils.add_ranging_to_sub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.SUBSCRIBE_TYPE_ACTIVE,
+                    ssi=self.getname()),
+                min_distance_mm=1000000,
+                max_distance_mm=None),
+            expect_discovery=False)
 
-  @test_tracker_info(uuid="a5619835-496a-4244-a428-f85cba3d4115")
-  def test_ranged_discovery_solicited_active_prange_smax_outofrange(self):
-    """Verify discovery with ranging:
+    @test_tracker_info(uuid="a5619835-496a-4244-a428-f85cba3d4115")
+    def test_ranged_discovery_solicited_active_prange_smax_outofrange(self):
+        """Verify discovery with ranging:
     - Solicited Publish/Active Subscribe
     - Publisher enables ranging
     - Subscriber enables ranging with max such that out of range (max=0)
 
     Expect: no discovery
     """
-    self.run_discovery(
-        p_config=autils.add_ranging_to_pub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.PUBLISH_TYPE_SOLICITED,
-                                           ssi=self.getname()),
-            enable_ranging=True),
-        s_config=autils.add_ranging_to_sub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.SUBSCRIBE_TYPE_ACTIVE,
-                                           ssi=self.getname()),
-            min_distance_mm=None,
-            max_distance_mm=0),
-        expect_discovery=False)
+        self.run_discovery(
+            p_config=autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.PUBLISH_TYPE_SOLICITED,
+                    ssi=self.getname()),
+                enable_ranging=True),
+            s_config=autils.add_ranging_to_sub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.SUBSCRIBE_TYPE_ACTIVE,
+                    ssi=self.getname()),
+                min_distance_mm=None,
+                max_distance_mm=0),
+            expect_discovery=False)
 
-  @test_tracker_info(uuid="12ebd91f-a973-410b-8ee1-0bd86024b921")
-  def test_ranged_discovery_solicited_active_prange_sminmax_outofrange(self):
-    """Verify discovery with ranging:
+    @test_tracker_info(uuid="12ebd91f-a973-410b-8ee1-0bd86024b921")
+    def test_ranged_discovery_solicited_active_prange_sminmax_outofrange(self):
+        """Verify discovery with ranging:
     - Solicited Publish/Active Subscribe
     - Publisher enables ranging
     - Subscriber enables ranging with min/max such that out of range (min=large,
@@ -629,34 +681,36 @@
 
     Expect: no discovery
     """
-    self.run_discovery(
-        p_config=autils.add_ranging_to_pub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.PUBLISH_TYPE_SOLICITED,
-                                           ssi=self.getname()),
-            enable_ranging=True),
-        s_config=autils.add_ranging_to_sub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.SUBSCRIBE_TYPE_ACTIVE,
-                                           ssi=self.getname()),
-            min_distance_mm=1000000,
-            max_distance_mm=1000001),
-        expect_discovery=False)
+        self.run_discovery(
+            p_config=autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.PUBLISH_TYPE_SOLICITED,
+                    ssi=self.getname()),
+                enable_ranging=True),
+            s_config=autils.add_ranging_to_sub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.SUBSCRIBE_TYPE_ACTIVE,
+                    ssi=self.getname()),
+                min_distance_mm=1000000,
+                max_distance_mm=1000001),
+            expect_discovery=False)
 
-  #########################################################################
-  # Run discovery with ranging configuration & update configurations after
-  # first run.
-  #
-  # Names: test_ranged_updated_discovery_<ptype>_<stype>_<scenario>
-  #
-  # where:
-  # <ptype>_<stype>: unsolicited_passive or solicited_active
-  # <scenario>: test scenario (details in name)
-  #########################################################################
+    #########################################################################
+    # Run discovery with ranging configuration & update configurations after
+    # first run.
+    #
+    # Names: test_ranged_updated_discovery_<ptype>_<stype>_<scenario>
+    #
+    # where:
+    # <ptype>_<stype>: unsolicited_passive or solicited_active
+    # <scenario>: test scenario (details in name)
+    #########################################################################
 
-  @test_tracker_info(uuid="59442180-4a6c-428f-b926-86000e8339b4")
-  def test_ranged_updated_discovery_unsolicited_passive_oor_to_ir(self):
-    """Verify discovery with ranging operation with updated configuration:
+    @test_tracker_info(uuid="59442180-4a6c-428f-b926-86000e8339b4")
+    def test_ranged_updated_discovery_unsolicited_passive_oor_to_ir(self):
+        """Verify discovery with ranging operation with updated configuration:
     - Unsolicited Publish/Passive Subscribe
     - Publisher enables ranging
     - Subscriber:
@@ -667,22 +721,27 @@
 
     Expect: discovery + ranging after update
     """
-    (p_dut, s_dut, p_disc_id,
-     s_disc_id) = self.run_discovery_prange_sminmax_outofrange(True)
-    self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
-        p_config=None, # no updates
-        s_config=autils.add_ranging_to_sub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.SUBSCRIBE_TYPE_PASSIVE,
-                                           ssi=self.getname()),
-            min_distance_mm=0,
-            max_distance_mm=1000000),
-        expect_discovery=True,
-        expect_range=True)
+        (p_dut, s_dut, p_disc_id,
+         s_disc_id) = self.run_discovery_prange_sminmax_outofrange(True)
+        self.run_discovery_update(
+            p_dut,
+            s_dut,
+            p_disc_id,
+            s_disc_id,
+            p_config=None,  # no updates
+            s_config=autils.add_ranging_to_sub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.SUBSCRIBE_TYPE_PASSIVE,
+                    ssi=self.getname()),
+                min_distance_mm=0,
+                max_distance_mm=1000000),
+            expect_discovery=True,
+            expect_range=True)
 
-  @test_tracker_info(uuid="60188508-104d-42d5-ac3a-3605093c45d7")
-  def test_ranged_updated_discovery_unsolicited_passive_pub_unrange(self):
-    """Verify discovery with ranging operation with updated configuration:
+    @test_tracker_info(uuid="60188508-104d-42d5-ac3a-3605093c45d7")
+    def test_ranged_updated_discovery_unsolicited_passive_pub_unrange(self):
+        """Verify discovery with ranging operation with updated configuration:
     - Unsolicited Publish/Passive Subscribe
     - Publisher enables ranging
     - Subscriber: Ranging enabled, min/max such that out of range (min=large,
@@ -691,19 +750,24 @@
 
     Expect: discovery w/o ranging after update
     """
-    (p_dut, s_dut, p_disc_id,
-     s_disc_id) = self.run_discovery_prange_sminmax_outofrange(True)
-    self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
-        p_config=autils.create_discovery_config(self.SERVICE_NAME,
-                                             aconsts.PUBLISH_TYPE_UNSOLICITED,
-                                             ssi=self.getname()),
-        s_config=None, # no updates
-        expect_discovery=True,
-        expect_range=False)
+        (p_dut, s_dut, p_disc_id,
+         s_disc_id) = self.run_discovery_prange_sminmax_outofrange(True)
+        self.run_discovery_update(
+            p_dut,
+            s_dut,
+            p_disc_id,
+            s_disc_id,
+            p_config=autils.create_discovery_config(
+                self.SERVICE_NAME,
+                aconsts.PUBLISH_TYPE_UNSOLICITED,
+                ssi=self.getname()),
+            s_config=None,  # no updates
+            expect_discovery=True,
+            expect_range=False)
 
-  @test_tracker_info(uuid="f96b434e-751d-4eb5-ae01-0c5c3a6fb4a2")
-  def test_ranged_updated_discovery_unsolicited_passive_sub_unrange(self):
-    """Verify discovery with ranging operation with updated configuration:
+    @test_tracker_info(uuid="f96b434e-751d-4eb5-ae01-0c5c3a6fb4a2")
+    def test_ranged_updated_discovery_unsolicited_passive_sub_unrange(self):
+        """Verify discovery with ranging operation with updated configuration:
     - Unsolicited Publish/Passive Subscribe
     - Publisher enables ranging
     - Subscriber:
@@ -713,19 +777,24 @@
 
     Expect: discovery w/o ranging after update
     """
-    (p_dut, s_dut, p_disc_id,
-     s_disc_id) = self.run_discovery_prange_sminmax_outofrange(True)
-    self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
-        p_config=None, # no updates
-        s_config=autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.SUBSCRIBE_TYPE_PASSIVE,
-                                           ssi=self.getname()),
-        expect_discovery=True,
-        expect_range=False)
+        (p_dut, s_dut, p_disc_id,
+         s_disc_id) = self.run_discovery_prange_sminmax_outofrange(True)
+        self.run_discovery_update(
+            p_dut,
+            s_dut,
+            p_disc_id,
+            s_disc_id,
+            p_config=None,  # no updates
+            s_config=autils.create_discovery_config(
+                self.SERVICE_NAME,
+                aconsts.SUBSCRIBE_TYPE_PASSIVE,
+                ssi=self.getname()),
+            expect_discovery=True,
+            expect_range=False)
 
-  @test_tracker_info(uuid="78970de8-9362-4647-931a-3513bcf58e80")
-  def test_ranged_updated_discovery_unsolicited_passive_sub_oor(self):
-    """Verify discovery with ranging operation with updated configuration:
+    @test_tracker_info(uuid="78970de8-9362-4647-931a-3513bcf58e80")
+    def test_ranged_updated_discovery_unsolicited_passive_sub_oor(self):
+        """Verify discovery with ranging operation with updated configuration:
     - Unsolicited Publish/Passive Subscribe
     - Publisher enables ranging
     - Subscriber:
@@ -735,21 +804,26 @@
 
     Expect: no discovery after update
     """
-    (p_dut, s_dut, p_disc_id,
-     s_disc_id) = self.run_discovery_prange_sminmax_outofrange(True)
-    self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
-        p_config=None, # no updates
-        s_config=autils.add_ranging_to_sub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.SUBSCRIBE_TYPE_PASSIVE,
-                                           ssi=self.getname()),
-            min_distance_mm=100000,
-            max_distance_mm=100001),
-        expect_discovery=False)
+        (p_dut, s_dut, p_disc_id,
+         s_disc_id) = self.run_discovery_prange_sminmax_outofrange(True)
+        self.run_discovery_update(
+            p_dut,
+            s_dut,
+            p_disc_id,
+            s_disc_id,
+            p_config=None,  # no updates
+            s_config=autils.add_ranging_to_sub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.SUBSCRIBE_TYPE_PASSIVE,
+                    ssi=self.getname()),
+                min_distance_mm=100000,
+                max_distance_mm=100001),
+            expect_discovery=False)
 
-  @test_tracker_info(uuid="0841ad05-4899-4521-bd24-04a8e2e345ac")
-  def test_ranged_updated_discovery_unsolicited_passive_pub_same(self):
-    """Verify discovery with ranging operation with updated configuration:
+    @test_tracker_info(uuid="0841ad05-4899-4521-bd24-04a8e2e345ac")
+    def test_ranged_updated_discovery_unsolicited_passive_pub_same(self):
+        """Verify discovery with ranging operation with updated configuration:
     - Unsolicited Publish/Passive Subscribe
     - Publisher enables ranging
     - Subscriber: Ranging enabled, min/max such that out of range (min=large,
@@ -758,20 +832,25 @@
 
     Expect: no discovery after update
     """
-    (p_dut, s_dut, p_disc_id,
-     s_disc_id) = self.run_discovery_prange_sminmax_outofrange(True)
-    self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
-        p_config=autils.add_ranging_to_pub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.PUBLISH_TYPE_UNSOLICITED,
-                                           ssi=self.getname()),
-            enable_ranging=True),
-        s_config=None, # no updates
-        expect_discovery=False)
+        (p_dut, s_dut, p_disc_id,
+         s_disc_id) = self.run_discovery_prange_sminmax_outofrange(True)
+        self.run_discovery_update(
+            p_dut,
+            s_dut,
+            p_disc_id,
+            s_disc_id,
+            p_config=autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.PUBLISH_TYPE_UNSOLICITED,
+                    ssi=self.getname()),
+                enable_ranging=True),
+            s_config=None,  # no updates
+            expect_discovery=False)
 
-  @test_tracker_info(uuid="ec6ca57b-f115-4516-813a-4572b930c8d3")
-  def test_ranged_updated_discovery_unsolicited_passive_multi_step(self):
-    """Verify discovery with ranging operation with updated configuration:
+    @test_tracker_info(uuid="ec6ca57b-f115-4516-813a-4572b930c8d3")
+    def test_ranged_updated_discovery_unsolicited_passive_multi_step(self):
+        """Verify discovery with ranging operation with updated configuration:
     - Unsolicited Publish/Passive Subscribe
     - Publisher enables ranging
     - Subscriber: Ranging enabled, min/max such that out of range (min=large,
@@ -785,38 +864,53 @@
     - Reconfigured to: Ranging disabled
       - Expect: discovery without ranging
     """
-    (p_dut, s_dut, p_disc_id,
-     s_disc_id) = self.run_discovery_prange_sminmax_outofrange(True)
-    self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
-            p_config=None, # no updates
+        (p_dut, s_dut, p_disc_id,
+         s_disc_id) = self.run_discovery_prange_sminmax_outofrange(True)
+        self.run_discovery_update(
+            p_dut,
+            s_dut,
+            p_disc_id,
+            s_disc_id,
+            p_config=None,  # no updates
             s_config=autils.add_ranging_to_sub(
-                autils.create_discovery_config(self.SERVICE_NAME,
-                                               aconsts.SUBSCRIBE_TYPE_PASSIVE,
-                                               ssi=self.getname()),
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.SUBSCRIBE_TYPE_PASSIVE,
+                    ssi=self.getname()),
                 min_distance_mm=0,
                 max_distance_mm=None),
             expect_discovery=True,
             expect_range=True)
-    self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
-            p_config=None, # no updates
+        self.run_discovery_update(
+            p_dut,
+            s_dut,
+            p_disc_id,
+            s_disc_id,
+            p_config=None,  # no updates
             s_config=autils.add_ranging_to_sub(
-                autils.create_discovery_config(self.SERVICE_NAME,
-                                               aconsts.SUBSCRIBE_TYPE_PASSIVE,
-                                               ssi=self.getname()),
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.SUBSCRIBE_TYPE_PASSIVE,
+                    ssi=self.getname()),
                 min_distance_mm=1000000,
                 max_distance_mm=None),
             expect_discovery=False)
-    self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
-            p_config=None, # no updates
-            s_config=autils.create_discovery_config(self.SERVICE_NAME,
-                                               aconsts.SUBSCRIBE_TYPE_PASSIVE,
-                                               ssi=self.getname()),
+        self.run_discovery_update(
+            p_dut,
+            s_dut,
+            p_disc_id,
+            s_disc_id,
+            p_config=None,  # no updates
+            s_config=autils.create_discovery_config(
+                self.SERVICE_NAME,
+                aconsts.SUBSCRIBE_TYPE_PASSIVE,
+                ssi=self.getname()),
             expect_discovery=True,
             expect_range=False)
 
-  @test_tracker_info(uuid="bbaac63b-000c-415f-bf19-0906f04031cd")
-  def test_ranged_updated_discovery_solicited_active_oor_to_ir(self):
-    """Verify discovery with ranging operation with updated configuration:
+    @test_tracker_info(uuid="bbaac63b-000c-415f-bf19-0906f04031cd")
+    def test_ranged_updated_discovery_solicited_active_oor_to_ir(self):
+        """Verify discovery with ranging operation with updated configuration:
     - Solicited Publish/Active Subscribe
     - Publisher enables ranging
     - Subscriber:
@@ -827,22 +921,27 @@
 
     Expect: discovery + ranging after update
     """
-    (p_dut, s_dut, p_disc_id,
-     s_disc_id) = self.run_discovery_prange_sminmax_outofrange(False)
-    self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
-        p_config=None, # no updates
-        s_config=autils.add_ranging_to_sub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.SUBSCRIBE_TYPE_ACTIVE,
-                                           ssi=self.getname()),
-            min_distance_mm=0,
-            max_distance_mm=1000000),
-        expect_discovery=True,
-        expect_range=True)
+        (p_dut, s_dut, p_disc_id,
+         s_disc_id) = self.run_discovery_prange_sminmax_outofrange(False)
+        self.run_discovery_update(
+            p_dut,
+            s_dut,
+            p_disc_id,
+            s_disc_id,
+            p_config=None,  # no updates
+            s_config=autils.add_ranging_to_sub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.SUBSCRIBE_TYPE_ACTIVE,
+                    ssi=self.getname()),
+                min_distance_mm=0,
+                max_distance_mm=1000000),
+            expect_discovery=True,
+            expect_range=True)
 
-  @test_tracker_info(uuid="c385b361-7955-4f34-9109-8d8ca81cb4cc")
-  def test_ranged_updated_discovery_solicited_active_pub_unrange(self):
-    """Verify discovery with ranging operation with updated configuration:
+    @test_tracker_info(uuid="c385b361-7955-4f34-9109-8d8ca81cb4cc")
+    def test_ranged_updated_discovery_solicited_active_pub_unrange(self):
+        """Verify discovery with ranging operation with updated configuration:
     - Solicited Publish/Active Subscribe
     - Publisher enables ranging
     - Subscriber: Ranging enabled, min/max such that out of range (min=large,
@@ -851,19 +950,24 @@
 
     Expect: discovery w/o ranging after update
     """
-    (p_dut, s_dut, p_disc_id,
-     s_disc_id) = self.run_discovery_prange_sminmax_outofrange(False)
-    self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
-        p_config=autils.create_discovery_config(self.SERVICE_NAME,
-                                                 aconsts.PUBLISH_TYPE_SOLICITED,
-                                                 ssi=self.getname()),
-        s_config=None, # no updates
-        expect_discovery=True,
-        expect_range=False)
+        (p_dut, s_dut, p_disc_id,
+         s_disc_id) = self.run_discovery_prange_sminmax_outofrange(False)
+        self.run_discovery_update(
+            p_dut,
+            s_dut,
+            p_disc_id,
+            s_disc_id,
+            p_config=autils.create_discovery_config(
+                self.SERVICE_NAME,
+                aconsts.PUBLISH_TYPE_SOLICITED,
+                ssi=self.getname()),
+            s_config=None,  # no updates
+            expect_discovery=True,
+            expect_range=False)
 
-  @test_tracker_info(uuid="ec5120ea-77ec-48c6-8820-48b82ad3dfd4")
-  def test_ranged_updated_discovery_solicited_active_sub_unrange(self):
-    """Verify discovery with ranging operation with updated configuration:
+    @test_tracker_info(uuid="ec5120ea-77ec-48c6-8820-48b82ad3dfd4")
+    def test_ranged_updated_discovery_solicited_active_sub_unrange(self):
+        """Verify discovery with ranging operation with updated configuration:
     - Solicited Publish/Active Subscribe
     - Publisher enables ranging
     - Subscriber:
@@ -873,19 +977,24 @@
 
     Expect: discovery w/o ranging after update
     """
-    (p_dut, s_dut, p_disc_id,
-     s_disc_id) = self.run_discovery_prange_sminmax_outofrange(False)
-    self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
-        p_config=None, # no updates
-        s_config=autils.create_discovery_config(self.SERVICE_NAME,
-                                                 aconsts.SUBSCRIBE_TYPE_ACTIVE,
-                                                 ssi=self.getname()),
-        expect_discovery=True,
-        expect_range=False)
+        (p_dut, s_dut, p_disc_id,
+         s_disc_id) = self.run_discovery_prange_sminmax_outofrange(False)
+        self.run_discovery_update(
+            p_dut,
+            s_dut,
+            p_disc_id,
+            s_disc_id,
+            p_config=None,  # no updates
+            s_config=autils.create_discovery_config(
+                self.SERVICE_NAME,
+                aconsts.SUBSCRIBE_TYPE_ACTIVE,
+                ssi=self.getname()),
+            expect_discovery=True,
+            expect_range=False)
 
-  @test_tracker_info(uuid="6231cb42-91e4-48d3-b9db-b37efbe8537c")
-  def test_ranged_updated_discovery_solicited_active_sub_oor(self):
-    """Verify discovery with ranging operation with updated configuration:
+    @test_tracker_info(uuid="6231cb42-91e4-48d3-b9db-b37efbe8537c")
+    def test_ranged_updated_discovery_solicited_active_sub_oor(self):
+        """Verify discovery with ranging operation with updated configuration:
     - Solicited Publish/Active Subscribe
     - Publisher enables ranging
     - Subscriber:
@@ -895,21 +1004,26 @@
 
     Expect: no discovery after update
     """
-    (p_dut, s_dut, p_disc_id,
-     s_disc_id) = self.run_discovery_prange_sminmax_outofrange(False)
-    self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
-        p_config=None, # no updates
-        s_config=autils.add_ranging_to_sub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.SUBSCRIBE_TYPE_ACTIVE,
-                                           ssi=self.getname()),
-            min_distance_mm=100000,
-            max_distance_mm=100001),
-        expect_discovery=False)
+        (p_dut, s_dut, p_disc_id,
+         s_disc_id) = self.run_discovery_prange_sminmax_outofrange(False)
+        self.run_discovery_update(
+            p_dut,
+            s_dut,
+            p_disc_id,
+            s_disc_id,
+            p_config=None,  # no updates
+            s_config=autils.add_ranging_to_sub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.SUBSCRIBE_TYPE_ACTIVE,
+                    ssi=self.getname()),
+                min_distance_mm=100000,
+                max_distance_mm=100001),
+            expect_discovery=False)
 
-  @test_tracker_info(uuid="ec999420-6a50-455e-b624-f4c9b4cb7ea5")
-  def test_ranged_updated_discovery_solicited_active_pub_same(self):
-    """Verify discovery with ranging operation with updated configuration:
+    @test_tracker_info(uuid="ec999420-6a50-455e-b624-f4c9b4cb7ea5")
+    def test_ranged_updated_discovery_solicited_active_pub_same(self):
+        """Verify discovery with ranging operation with updated configuration:
     - Solicited Publish/Active Subscribe
     - Publisher enables ranging
     - Subscriber: Ranging enabled, min/max such that out of range (min=large,
@@ -918,20 +1032,25 @@
 
     Expect: no discovery after update
     """
-    (p_dut, s_dut, p_disc_id,
-     s_disc_id) = self.run_discovery_prange_sminmax_outofrange(False)
-    self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
-        p_config=autils.add_ranging_to_pub(
-            autils.create_discovery_config(self.SERVICE_NAME,
-                                           aconsts.PUBLISH_TYPE_SOLICITED,
-                                           ssi=self.getname()),
-            enable_ranging=True),
-        s_config=None, # no updates
-        expect_discovery=False)
+        (p_dut, s_dut, p_disc_id,
+         s_disc_id) = self.run_discovery_prange_sminmax_outofrange(False)
+        self.run_discovery_update(
+            p_dut,
+            s_dut,
+            p_disc_id,
+            s_disc_id,
+            p_config=autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.PUBLISH_TYPE_SOLICITED,
+                    ssi=self.getname()),
+                enable_ranging=True),
+            s_config=None,  # no updates
+            expect_discovery=False)
 
-  @test_tracker_info(uuid="ec6ca57b-f115-4516-813a-4572b930c8d3")
-  def test_ranged_updated_discovery_solicited_active_multi_step(self):
-    """Verify discovery with ranging operation with updated configuration:
+    @test_tracker_info(uuid="ec6ca57b-f115-4516-813a-4572b930c8d3")
+    def test_ranged_updated_discovery_solicited_active_multi_step(self):
+        """Verify discovery with ranging operation with updated configuration:
     - Unsolicited Publish/Passive Subscribe
     - Publisher enables ranging
     - Subscriber: Ranging enabled, min/max such that out of range (min=large,
@@ -945,40 +1064,55 @@
     - Reconfigured to: Ranging disabled
       - Expect: discovery without ranging
     """
-    (p_dut, s_dut, p_disc_id,
-     s_disc_id) = self.run_discovery_prange_sminmax_outofrange(True)
-    self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
-            p_config=None, # no updates
+        (p_dut, s_dut, p_disc_id,
+         s_disc_id) = self.run_discovery_prange_sminmax_outofrange(True)
+        self.run_discovery_update(
+            p_dut,
+            s_dut,
+            p_disc_id,
+            s_disc_id,
+            p_config=None,  # no updates
             s_config=autils.add_ranging_to_sub(
-                autils.create_discovery_config(self.SERVICE_NAME,
-                                               aconsts.SUBSCRIBE_TYPE_ACTIVE,
-                                               ssi=self.getname()),
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.SUBSCRIBE_TYPE_ACTIVE,
+                    ssi=self.getname()),
                 min_distance_mm=0,
                 max_distance_mm=None),
             expect_discovery=True,
             expect_range=True)
-    self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
-            p_config=None, # no updates
+        self.run_discovery_update(
+            p_dut,
+            s_dut,
+            p_disc_id,
+            s_disc_id,
+            p_config=None,  # no updates
             s_config=autils.add_ranging_to_sub(
-                autils.create_discovery_config(self.SERVICE_NAME,
-                                               aconsts.SUBSCRIBE_TYPE_ACTIVE,
-                                               ssi=self.getname()),
+                autils.create_discovery_config(
+                    self.SERVICE_NAME,
+                    aconsts.SUBSCRIBE_TYPE_ACTIVE,
+                    ssi=self.getname()),
                 min_distance_mm=1000000,
                 max_distance_mm=None),
             expect_discovery=False)
-    self.run_discovery_update(p_dut, s_dut, p_disc_id, s_disc_id,
-            p_config=None, # no updates
-            s_config=autils.create_discovery_config(self.SERVICE_NAME,
-                                                aconsts.SUBSCRIBE_TYPE_ACTIVE,
-                                                ssi=self.getname()),
+        self.run_discovery_update(
+            p_dut,
+            s_dut,
+            p_disc_id,
+            s_disc_id,
+            p_config=None,  # no updates
+            s_config=autils.create_discovery_config(
+                self.SERVICE_NAME,
+                aconsts.SUBSCRIBE_TYPE_ACTIVE,
+                ssi=self.getname()),
             expect_discovery=True,
             expect_range=False)
 
-  #########################################################################
+    #########################################################################
 
-  @test_tracker_info(uuid="6edc47ab-7300-4bff-b7dd-5de83f58928a")
-  def test_ranged_discovery_multi_session(self):
-    """Verify behavior with multiple concurrent discovery session with different
+    @test_tracker_info(uuid="6edc47ab-7300-4bff-b7dd-5de83f58928a")
+    def test_ranged_discovery_multi_session(self):
+        """Verify behavior with multiple concurrent discovery session with different
     configurations:
 
     Device A (Publisher):
@@ -992,118 +1126,140 @@
       Subscriber CC: ranging disabled -> match w/o range
       Subscriber DD: ranging out-of-range -> match w/o range
     """
-    p_dut = self.android_devices[0]
-    p_dut.pretty_name = "Publisher"
-    s_dut = self.android_devices[1]
-    s_dut.pretty_name = "Subscriber"
+        p_dut = self.android_devices[0]
+        p_dut.pretty_name = "Publisher"
+        s_dut = self.android_devices[1]
+        s_dut.pretty_name = "Subscriber"
 
-    # Publisher+Subscriber: attach and wait for confirmation
-    p_id = p_dut.droid.wifiAwareAttach(False)
-    autils.wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
-    time.sleep(self.device_startup_offset)
-    s_id = s_dut.droid.wifiAwareAttach(False)
-    autils.wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
+        # Publisher+Subscriber: attach and wait for confirmation
+        p_id = p_dut.droid.wifiAwareAttach(False)
+        autils.wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
+        time.sleep(self.device_startup_offset)
+        s_id = s_dut.droid.wifiAwareAttach(False)
+        autils.wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
 
-    # Subscriber: start sessions
-    aa_s_disc_id = s_dut.droid.wifiAwareSubscribe(
-        s_id,
-        autils.add_ranging_to_sub(
-            autils.create_discovery_config("AA",
-                                           aconsts.SUBSCRIBE_TYPE_PASSIVE),
-            min_distance_mm=1000000, max_distance_mm=1000001),
-        True)
-    bb_s_disc_id = s_dut.droid.wifiAwareSubscribe(
-        s_id,
-        autils.add_ranging_to_sub(
-            autils.create_discovery_config("BB",
-                                           aconsts.SUBSCRIBE_TYPE_PASSIVE),
-            min_distance_mm=0, max_distance_mm=1000000),
-        True)
-    cc_s_disc_id = s_dut.droid.wifiAwareSubscribe(
-        s_id,
-        autils.create_discovery_config("CC", aconsts.SUBSCRIBE_TYPE_PASSIVE),
-        True)
-    dd_s_disc_id = s_dut.droid.wifiAwareSubscribe(
-        s_id,
-        autils.add_ranging_to_sub(
-            autils.create_discovery_config("DD",
-                                           aconsts.SUBSCRIBE_TYPE_PASSIVE),
-            min_distance_mm=1000000, max_distance_mm=1000001),
-        True)
+        # Subscriber: start sessions
+        aa_s_disc_id = s_dut.droid.wifiAwareSubscribe(
+            s_id,
+            autils.add_ranging_to_sub(
+                autils.create_discovery_config("AA",
+                                               aconsts.SUBSCRIBE_TYPE_PASSIVE),
+                min_distance_mm=1000000,
+                max_distance_mm=1000001), True)
+        bb_s_disc_id = s_dut.droid.wifiAwareSubscribe(
+            s_id,
+            autils.add_ranging_to_sub(
+                autils.create_discovery_config("BB",
+                                               aconsts.SUBSCRIBE_TYPE_PASSIVE),
+                min_distance_mm=0,
+                max_distance_mm=1000000), True)
+        cc_s_disc_id = s_dut.droid.wifiAwareSubscribe(
+            s_id,
+            autils.create_discovery_config(
+                "CC", aconsts.SUBSCRIBE_TYPE_PASSIVE), True)
+        dd_s_disc_id = s_dut.droid.wifiAwareSubscribe(
+            s_id,
+            autils.add_ranging_to_sub(
+                autils.create_discovery_config("DD",
+                                               aconsts.SUBSCRIBE_TYPE_PASSIVE),
+                min_distance_mm=1000000,
+                max_distance_mm=1000001), True)
 
-    autils.wait_for_event(s_dut, autils.decorate_event(
-      aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, aa_s_disc_id))
-    autils.wait_for_event(s_dut, autils.decorate_event(
-      aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, bb_s_disc_id))
-    autils.wait_for_event(s_dut, autils.decorate_event(
-      aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, cc_s_disc_id))
-    autils.wait_for_event(s_dut, autils.decorate_event(
-      aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, dd_s_disc_id))
+        autils.wait_for_event(
+            s_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED,
+                                  aa_s_disc_id))
+        autils.wait_for_event(
+            s_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED,
+                                  bb_s_disc_id))
+        autils.wait_for_event(
+            s_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED,
+                                  cc_s_disc_id))
+        autils.wait_for_event(
+            s_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED,
+                                  dd_s_disc_id))
 
-    # Publisher: start sessions
-    aa_p_disc_id = p_dut.droid.wifiAwarePublish(
-        p_id,
-        autils.add_ranging_to_pub(
-            autils.create_discovery_config("AA",
-                                           aconsts.PUBLISH_TYPE_UNSOLICITED),
-            enable_ranging=True),
-        True)
-    bb_p_disc_id = p_dut.droid.wifiAwarePublish(
-        p_id,
-        autils.add_ranging_to_pub(
-            autils.create_discovery_config("BB",
-                                           aconsts.PUBLISH_TYPE_UNSOLICITED),
-            enable_ranging=True),
-        True)
-    cc_p_disc_id = p_dut.droid.wifiAwarePublish(
-        p_id,
-        autils.add_ranging_to_pub(
-            autils.create_discovery_config("CC",
-                                           aconsts.PUBLISH_TYPE_UNSOLICITED),
-            enable_ranging=True),
-        True)
-    dd_p_disc_id = p_dut.droid.wifiAwarePublish(
-        p_id,
-        autils.create_discovery_config("DD", aconsts.PUBLISH_TYPE_UNSOLICITED),
-        True)
+        # Publisher: start sessions
+        aa_p_disc_id = p_dut.droid.wifiAwarePublish(
+            p_id,
+            autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    "AA", aconsts.PUBLISH_TYPE_UNSOLICITED),
+                enable_ranging=True), True)
+        bb_p_disc_id = p_dut.droid.wifiAwarePublish(
+            p_id,
+            autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    "BB", aconsts.PUBLISH_TYPE_UNSOLICITED),
+                enable_ranging=True), True)
+        cc_p_disc_id = p_dut.droid.wifiAwarePublish(
+            p_id,
+            autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    "CC", aconsts.PUBLISH_TYPE_UNSOLICITED),
+                enable_ranging=True), True)
+        dd_p_disc_id = p_dut.droid.wifiAwarePublish(
+            p_id,
+            autils.create_discovery_config(
+                "DD", aconsts.PUBLISH_TYPE_UNSOLICITED), True)
 
-    autils.wait_for_event(p_dut, autils.decorate_event(
-        aconsts.SESSION_CB_ON_PUBLISH_STARTED, aa_p_disc_id))
-    autils.wait_for_event(p_dut, autils.decorate_event(
-        aconsts.SESSION_CB_ON_PUBLISH_STARTED, bb_p_disc_id))
-    autils.wait_for_event(p_dut, autils.decorate_event(
-        aconsts.SESSION_CB_ON_PUBLISH_STARTED, cc_p_disc_id))
-    autils.wait_for_event(p_dut, autils.decorate_event(
-        aconsts.SESSION_CB_ON_PUBLISH_STARTED, dd_p_disc_id))
+        autils.wait_for_event(
+            p_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_PUBLISH_STARTED,
+                                  aa_p_disc_id))
+        autils.wait_for_event(
+            p_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_PUBLISH_STARTED,
+                                  bb_p_disc_id))
+        autils.wait_for_event(
+            p_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_PUBLISH_STARTED,
+                                  cc_p_disc_id))
+        autils.wait_for_event(
+            p_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_PUBLISH_STARTED,
+                                  dd_p_disc_id))
 
-    # Expected and unexpected service discovery
-    event = autils.wait_for_event(s_dut, autils.decorate_event(
-      aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, bb_s_disc_id))
-    asserts.assert_true(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
-                        "Discovery with ranging for BB expected!")
-    event = autils.wait_for_event(s_dut, autils.decorate_event(
-      aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, cc_s_disc_id))
-    asserts.assert_false(
-        aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
-        "Discovery with ranging for CC NOT expected!")
-    event = autils.wait_for_event(s_dut, autils.decorate_event(
-      aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, dd_s_disc_id))
-    asserts.assert_false(
-        aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
-        "Discovery with ranging for DD NOT expected!")
-    autils.fail_on_event(s_dut, autils.decorate_event(
-      aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, aa_s_disc_id))
+        # Expected and unexpected service discovery
+        event = autils.wait_for_event(
+            s_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SERVICE_DISCOVERED,
+                                  bb_s_disc_id))
+        asserts.assert_true(
+            aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+            "Discovery with ranging for BB expected!")
+        event = autils.wait_for_event(
+            s_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SERVICE_DISCOVERED,
+                                  cc_s_disc_id))
+        asserts.assert_false(
+            aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+            "Discovery with ranging for CC NOT expected!")
+        event = autils.wait_for_event(
+            s_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SERVICE_DISCOVERED,
+                                  dd_s_disc_id))
+        asserts.assert_false(
+            aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+            "Discovery with ranging for DD NOT expected!")
+        autils.fail_on_event(
+            s_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SERVICE_DISCOVERED,
+                                  aa_s_disc_id))
 
-    # (single) sleep for timeout period and then verify that no further events
-    time.sleep(autils.EVENT_TIMEOUT)
-    autils.verify_no_more_events(p_dut, timeout=0)
-    autils.verify_no_more_events(s_dut, timeout=0)
+        # (single) sleep for timeout period and then verify that no further events
+        time.sleep(autils.EVENT_TIMEOUT)
+        autils.verify_no_more_events(p_dut, timeout=0)
+        autils.verify_no_more_events(s_dut, timeout=0)
 
-  #########################################################################
+    #########################################################################
 
-  @test_tracker_info(uuid="deede47f-a54c-46d9-88bb-f4482fbd8470")
-  def test_ndp_concurrency(self):
-    """Verify the behavior of Wi-Fi Aware Ranging whenever an NDP is created -
+    @test_tracker_info(uuid="deede47f-a54c-46d9-88bb-f4482fbd8470")
+    def test_ndp_concurrency(self):
+        """Verify the behavior of Wi-Fi Aware Ranging whenever an NDP is created -
     for those devices that have a concurrency limitation that does not allow
     Aware Ranging, whether direct or as part of discovery.
 
@@ -1135,185 +1291,276 @@
       Update configuration to be in-range
       Verify that get match with ranging information
     """
-    p_dut = self.android_devices[0]
-    p_dut.pretty_name = "Publisher"
-    s_dut = self.android_devices[1]
-    s_dut.pretty_name = "Subscriber"
+        p_dut = self.android_devices[0]
+        p_dut.pretty_name = "Publisher"
+        s_dut = self.android_devices[1]
+        s_dut.pretty_name = "Subscriber"
 
-    # Publisher+Subscriber: attach and wait for confirmation
-    p_id = p_dut.droid.wifiAwareAttach(False)
-    autils.wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
-    time.sleep(self.device_startup_offset)
-    s_id = s_dut.droid.wifiAwareAttach(False)
-    autils.wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
+        # Publisher+Subscriber: attach and wait for confirmation
+        p_id = p_dut.droid.wifiAwareAttach(False)
+        autils.wait_for_event(p_dut, aconsts.EVENT_CB_ON_ATTACHED)
+        time.sleep(self.device_startup_offset)
+        s_id = s_dut.droid.wifiAwareAttach(False)
+        autils.wait_for_event(s_dut, aconsts.EVENT_CB_ON_ATTACHED)
 
-    # Publisher: AA w/o ranging, BB w/ ranging, CC w/ ranging, DD w/ ranging
-    aa_p_id = p_dut.droid.wifiAwarePublish(p_id,
-        autils.create_discovery_config("AA", aconsts.PUBLISH_TYPE_SOLICITED),
-                                           True)
-    autils.wait_for_event(p_dut, autils.decorate_event(
-        aconsts.SESSION_CB_ON_PUBLISH_STARTED, aa_p_id))
-    bb_p_id = p_dut.droid.wifiAwarePublish(p_id, autils.add_ranging_to_pub(
-        autils.create_discovery_config("BB", aconsts.PUBLISH_TYPE_UNSOLICITED),
-        enable_ranging=True), True)
-    autils.wait_for_event(p_dut, autils.decorate_event(
-        aconsts.SESSION_CB_ON_PUBLISH_STARTED, bb_p_id))
-    cc_p_id = p_dut.droid.wifiAwarePublish(p_id, autils.add_ranging_to_pub(
-      autils.create_discovery_config("CC", aconsts.PUBLISH_TYPE_UNSOLICITED),
-      enable_ranging=True), True)
-    autils.wait_for_event(p_dut, autils.decorate_event(
-        aconsts.SESSION_CB_ON_PUBLISH_STARTED, cc_p_id))
-    dd_p_id = p_dut.droid.wifiAwarePublish(p_id, autils.add_ranging_to_pub(
-        autils.create_discovery_config("DD", aconsts.PUBLISH_TYPE_UNSOLICITED),
-        enable_ranging=True), True)
-    autils.wait_for_event(p_dut, autils.decorate_event(
-        aconsts.SESSION_CB_ON_PUBLISH_STARTED, dd_p_id))
+        # Publisher: AA w/o ranging, BB w/ ranging, CC w/ ranging, DD w/ ranging
+        aa_p_id = p_dut.droid.wifiAwarePublish(
+            p_id,
+            autils.create_discovery_config(
+                "AA", aconsts.PUBLISH_TYPE_SOLICITED), True)
+        autils.wait_for_event(
+            p_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_PUBLISH_STARTED,
+                                  aa_p_id))
+        bb_p_id = p_dut.droid.wifiAwarePublish(
+            p_id,
+            autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    "BB", aconsts.PUBLISH_TYPE_UNSOLICITED),
+                enable_ranging=True), True)
+        autils.wait_for_event(
+            p_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_PUBLISH_STARTED,
+                                  bb_p_id))
+        cc_p_id = p_dut.droid.wifiAwarePublish(
+            p_id,
+            autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    "CC", aconsts.PUBLISH_TYPE_UNSOLICITED),
+                enable_ranging=True), True)
+        autils.wait_for_event(
+            p_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_PUBLISH_STARTED,
+                                  cc_p_id))
+        dd_p_id = p_dut.droid.wifiAwarePublish(
+            p_id,
+            autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    "DD", aconsts.PUBLISH_TYPE_UNSOLICITED),
+                enable_ranging=True), True)
+        autils.wait_for_event(
+            p_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_PUBLISH_STARTED,
+                                  dd_p_id))
 
-    # Subscriber: AA w/o ranging, BB w/ranging out-of-range,
-    #             DD w /ranging in-range
-    aa_s_id = s_dut.droid.wifiAwareSubscribe(s_id,
-        autils.create_discovery_config("AA", aconsts.SUBSCRIBE_TYPE_ACTIVE),
-                                             True)
-    autils.wait_for_event(s_dut, autils.decorate_event(
-        aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, aa_s_id))
-    bb_s_id = s_dut.droid.wifiAwareSubscribe(s_id, autils.add_ranging_to_sub(
-      autils.create_discovery_config("BB", aconsts.SUBSCRIBE_TYPE_PASSIVE),
-      min_distance_mm=1000000, max_distance_mm=1000001), True)
-    autils.wait_for_event(s_dut, autils.decorate_event(
-        aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, bb_s_id))
-    dd_s_id = s_dut.droid.wifiAwareSubscribe(s_id, autils.add_ranging_to_sub(
-        autils.create_discovery_config("DD", aconsts.SUBSCRIBE_TYPE_PASSIVE),
-        min_distance_mm=None, max_distance_mm=1000000), True)
-    autils.wait_for_event(s_dut, autils.decorate_event(
-        aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, dd_s_id))
+        # Subscriber: AA w/o ranging, BB w/ranging out-of-range,
+        #             DD w /ranging in-range
+        aa_s_id = s_dut.droid.wifiAwareSubscribe(
+            s_id,
+            autils.create_discovery_config(
+                "AA", aconsts.SUBSCRIBE_TYPE_ACTIVE), True)
+        autils.wait_for_event(
+            s_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED,
+                                  aa_s_id))
+        bb_s_id = s_dut.droid.wifiAwareSubscribe(
+            s_id,
+            autils.add_ranging_to_sub(
+                autils.create_discovery_config("BB",
+                                               aconsts.SUBSCRIBE_TYPE_PASSIVE),
+                min_distance_mm=1000000,
+                max_distance_mm=1000001), True)
+        autils.wait_for_event(
+            s_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED,
+                                  bb_s_id))
+        dd_s_id = s_dut.droid.wifiAwareSubscribe(
+            s_id,
+            autils.add_ranging_to_sub(
+                autils.create_discovery_config("DD",
+                                               aconsts.SUBSCRIBE_TYPE_PASSIVE),
+                min_distance_mm=None,
+                max_distance_mm=1000000), True)
+        autils.wait_for_event(
+            s_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED,
+                                  dd_s_id))
 
-    # verify: AA discovered, BB not discovered, DD discovery w/range
-    event = autils.wait_for_event(s_dut, autils.decorate_event(
-        aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, aa_s_id))
-    asserts.assert_false(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
-                         "Discovery with ranging for AA NOT expected!")
-    aa_peer_id_on_sub = event['data'][aconsts.SESSION_CB_KEY_PEER_ID]
-    autils.fail_on_event(s_dut, autils.decorate_event(
-        aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, bb_s_id))
-    event = autils.wait_for_event(s_dut, autils.decorate_event(
-        aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, dd_s_id))
-    asserts.assert_true(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
-                        "Discovery with ranging for DD expected!")
+        # verify: AA discovered, BB not discovered, DD discovery w/range
+        event = autils.wait_for_event(
+            s_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SERVICE_DISCOVERED,
+                                  aa_s_id))
+        asserts.assert_false(
+            aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+            "Discovery with ranging for AA NOT expected!")
+        aa_peer_id_on_sub = event['data'][aconsts.SESSION_CB_KEY_PEER_ID]
+        autils.fail_on_event(
+            s_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SERVICE_DISCOVERED,
+                                  bb_s_id))
+        event = autils.wait_for_event(
+            s_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SERVICE_DISCOVERED,
+                                  dd_s_id))
+        asserts.assert_true(
+            aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+            "Discovery with ranging for DD expected!")
 
-    # start NDP in context of AA:
+        # start NDP in context of AA:
 
-    # Publisher: request network (from ANY)
-    p_req_key = autils.request_network(p_dut,
-        p_dut.droid.wifiAwareCreateNetworkSpecifier(aa_p_id, None))
+        # Publisher: request network (from ANY)
+        p_req_key = autils.request_network(
+            p_dut, p_dut.droid.wifiAwareCreateNetworkSpecifier(aa_p_id, None))
 
-    # Subscriber: request network
-    s_req_key = autils.request_network(s_dut,
-        s_dut.droid.wifiAwareCreateNetworkSpecifier(aa_s_id, aa_peer_id_on_sub))
+        # Subscriber: request network
+        s_req_key = autils.request_network(
+            s_dut,
+            s_dut.droid.wifiAwareCreateNetworkSpecifier(
+                aa_s_id, aa_peer_id_on_sub))
 
-    # Publisher & Subscriber: wait for network formation
-    p_net_event = autils.wait_for_event_with_keys(p_dut,
-                                    cconsts.EVENT_NETWORK_CALLBACK,
-                                    autils.EVENT_TIMEOUT, (
-                                    cconsts.NETWORK_CB_KEY_EVENT,
-                                    cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-                                    (cconsts.NETWORK_CB_KEY_ID,
-                                     p_req_key))
-    s_net_event = autils.wait_for_event_with_keys(s_dut,
-                                    cconsts.EVENT_NETWORK_CALLBACK,
-                                    autils.EVENT_TIMEOUT, (
-                                    cconsts.NETWORK_CB_KEY_EVENT,
-                                    cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
-                                    (cconsts.NETWORK_CB_KEY_ID,
-                                     s_req_key))
+        # Publisher & Subscriber: wait for network formation
+        p_net_event = autils.wait_for_event_with_keys(
+            p_dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_TIMEOUT,
+            (cconsts.NETWORK_CB_KEY_EVENT,
+             cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+            (cconsts.NETWORK_CB_KEY_ID, p_req_key))
+        s_net_event = autils.wait_for_event_with_keys(
+            s_dut, cconsts.EVENT_NETWORK_CALLBACK, autils.EVENT_TIMEOUT,
+            (cconsts.NETWORK_CB_KEY_EVENT,
+             cconsts.NETWORK_CB_LINK_PROPERTIES_CHANGED),
+            (cconsts.NETWORK_CB_KEY_ID, s_req_key))
 
-    p_aware_if = p_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
-    s_aware_if = s_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+        p_aware_if = p_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
+        s_aware_if = s_net_event["data"][cconsts.NETWORK_CB_KEY_INTERFACE_NAME]
 
-    p_ipv6 = p_dut.droid.connectivityGetLinkLocalIpv6Address(p_aware_if).split(
-        "%")[0]
-    s_ipv6 = s_dut.droid.connectivityGetLinkLocalIpv6Address(s_aware_if).split(
-        "%")[0]
+        p_ipv6 = p_dut.droid.connectivityGetLinkLocalIpv6Address(
+            p_aware_if).split("%")[0]
+        s_ipv6 = s_dut.droid.connectivityGetLinkLocalIpv6Address(
+            s_aware_if).split("%")[0]
 
-    self.log.info("AA NDP Interface names: P=%s, S=%s", p_aware_if, s_aware_if)
-    self.log.info("AA NDP Interface addresses (IPv6): P=%s, S=%s", p_ipv6,
-                  s_ipv6)
+        self.log.info("AA NDP Interface names: P=%s, S=%s", p_aware_if,
+                      s_aware_if)
+        self.log.info("AA NDP Interface addresses (IPv6): P=%s, S=%s", p_ipv6,
+                      s_ipv6)
 
-    if self.RANGING_NDP_CONCURRENCY_LIMITATION:
-      # Expect BB to now discover w/o ranging
-      event = autils.wait_for_event(s_dut, autils.decorate_event(
-          aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, bb_s_id))
-      asserts.assert_false(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
-                           "Discovery with ranging for BB NOT expected!")
+        if self.RANGING_NDP_CONCURRENCY_LIMITATION:
+            # Expect BB to now discover w/o ranging
+            event = autils.wait_for_event(
+                s_dut,
+                autils.decorate_event(aconsts.SESSION_CB_ON_SERVICE_DISCOVERED,
+                                      bb_s_id))
+            asserts.assert_false(
+                aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+                "Discovery with ranging for BB NOT expected!")
 
-    # Publishers: EE, FF w/ ranging
-    ee_p_id = p_dut.droid.wifiAwarePublish(p_id, autils.add_ranging_to_pub(
-        autils.create_discovery_config("EE", aconsts.PUBLISH_TYPE_SOLICITED),
-        enable_ranging=True), True)
-    autils.wait_for_event(p_dut, autils.decorate_event(
-        aconsts.SESSION_CB_ON_PUBLISH_STARTED, ee_p_id))
-    ff_p_id = p_dut.droid.wifiAwarePublish(p_id, autils.add_ranging_to_pub(
-        autils.create_discovery_config("FF", aconsts.PUBLISH_TYPE_UNSOLICITED),
-        enable_ranging=True), True)
-    autils.wait_for_event(p_dut, autils.decorate_event(
-        aconsts.SESSION_CB_ON_PUBLISH_STARTED, ff_p_id))
+        # Publishers: EE, FF w/ ranging
+        ee_p_id = p_dut.droid.wifiAwarePublish(
+            p_id,
+            autils.add_ranging_to_pub(
+                autils.create_discovery_config("EE",
+                                               aconsts.PUBLISH_TYPE_SOLICITED),
+                enable_ranging=True), True)
+        autils.wait_for_event(
+            p_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_PUBLISH_STARTED,
+                                  ee_p_id))
+        ff_p_id = p_dut.droid.wifiAwarePublish(
+            p_id,
+            autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    "FF", aconsts.PUBLISH_TYPE_UNSOLICITED),
+                enable_ranging=True), True)
+        autils.wait_for_event(
+            p_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_PUBLISH_STARTED,
+                                  ff_p_id))
 
-    # Subscribers: EE out-of-range, FF in-range
-    ee_s_id = s_dut.droid.wifiAwareSubscribe(s_id, autils.add_ranging_to_sub(
-        autils.create_discovery_config("EE", aconsts.SUBSCRIBE_TYPE_ACTIVE),
-        min_distance_mm=1000000, max_distance_mm=1000001), True)
-    autils.wait_for_event(s_dut, autils.decorate_event(
-        aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, ee_s_id))
-    ff_s_id = s_dut.droid.wifiAwareSubscribe(s_id, autils.add_ranging_to_sub(
-        autils.create_discovery_config("FF", aconsts.SUBSCRIBE_TYPE_PASSIVE),
-        min_distance_mm=None, max_distance_mm=1000000), True)
-    autils.wait_for_event(s_dut, autils.decorate_event(
-        aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, ff_s_id))
+        # Subscribers: EE out-of-range, FF in-range
+        ee_s_id = s_dut.droid.wifiAwareSubscribe(
+            s_id,
+            autils.add_ranging_to_sub(
+                autils.create_discovery_config("EE",
+                                               aconsts.SUBSCRIBE_TYPE_ACTIVE),
+                min_distance_mm=1000000,
+                max_distance_mm=1000001), True)
+        autils.wait_for_event(
+            s_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED,
+                                  ee_s_id))
+        ff_s_id = s_dut.droid.wifiAwareSubscribe(
+            s_id,
+            autils.add_ranging_to_sub(
+                autils.create_discovery_config("FF",
+                                               aconsts.SUBSCRIBE_TYPE_PASSIVE),
+                min_distance_mm=None,
+                max_distance_mm=1000000), True)
+        autils.wait_for_event(
+            s_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED,
+                                  ff_s_id))
 
-    if self.RANGING_NDP_CONCURRENCY_LIMITATION:
-      # Expect EE & FF discovery w/o range
-      event = autils.wait_for_event(s_dut, autils.decorate_event(
-          aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, ee_s_id))
-      asserts.assert_false(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
-                           "Discovery with ranging for EE NOT expected!")
-      event = autils.wait_for_event(s_dut, autils.decorate_event(
-          aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, ff_s_id))
-      asserts.assert_false(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
-                           "Discovery with ranging for FF NOT expected!")
-    else:
-      event = autils.wait_for_event(s_dut, autils.decorate_event(
-          aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, ff_s_id))
-      asserts.assert_true(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
-                           "Discovery with ranging for FF expected!")
+        if self.RANGING_NDP_CONCURRENCY_LIMITATION:
+            # Expect EE & FF discovery w/o range
+            event = autils.wait_for_event(
+                s_dut,
+                autils.decorate_event(aconsts.SESSION_CB_ON_SERVICE_DISCOVERED,
+                                      ee_s_id))
+            asserts.assert_false(
+                aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+                "Discovery with ranging for EE NOT expected!")
+            event = autils.wait_for_event(
+                s_dut,
+                autils.decorate_event(aconsts.SESSION_CB_ON_SERVICE_DISCOVERED,
+                                      ff_s_id))
+            asserts.assert_false(
+                aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+                "Discovery with ranging for FF NOT expected!")
+        else:
+            event = autils.wait_for_event(
+                s_dut,
+                autils.decorate_event(aconsts.SESSION_CB_ON_SERVICE_DISCOVERED,
+                                      ff_s_id))
+            asserts.assert_true(
+                aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+                "Discovery with ranging for FF expected!")
 
-    # tear down NDP
-    p_dut.droid.connectivityUnregisterNetworkCallback(p_req_key)
-    s_dut.droid.connectivityUnregisterNetworkCallback(s_req_key)
+        # tear down NDP
+        p_dut.droid.connectivityUnregisterNetworkCallback(p_req_key)
+        s_dut.droid.connectivityUnregisterNetworkCallback(s_req_key)
 
-    time.sleep(5) # give time for NDP termination to finish
+        time.sleep(5)  # give time for NDP termination to finish
 
-    # Subscriber: start CC out-of-range - no discovery expected!
-    cc_s_id = s_dut.droid.wifiAwareSubscribe(s_id, autils.add_ranging_to_sub(
-        autils.create_discovery_config("CC", aconsts.SUBSCRIBE_TYPE_PASSIVE),
-        min_distance_mm=1000000, max_distance_mm=1000001), True)
-    autils.wait_for_event(s_dut, autils.decorate_event(
-        aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, cc_s_id))
-    autils.fail_on_event(s_dut, autils.decorate_event(
-        aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, cc_s_id))
+        # Subscriber: start CC out-of-range - no discovery expected!
+        cc_s_id = s_dut.droid.wifiAwareSubscribe(
+            s_id,
+            autils.add_ranging_to_sub(
+                autils.create_discovery_config("CC",
+                                               aconsts.SUBSCRIBE_TYPE_PASSIVE),
+                min_distance_mm=1000000,
+                max_distance_mm=1000001), True)
+        autils.wait_for_event(
+            s_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED,
+                                  cc_s_id))
+        autils.fail_on_event(
+            s_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SERVICE_DISCOVERED,
+                                  cc_s_id))
 
-    # Subscriber: modify CC to in-range - expect discovery w/ range
-    s_dut.droid.wifiAwareUpdateSubscribe(cc_s_id, autils.add_ranging_to_sub(
-        autils.create_discovery_config("CC", aconsts.SUBSCRIBE_TYPE_PASSIVE),
-        min_distance_mm=None, max_distance_mm=1000001))
-    autils.wait_for_event(s_dut, autils.decorate_event(
-        aconsts.SESSION_CB_ON_SESSION_CONFIG_UPDATED, cc_s_id))
-    event = autils.wait_for_event(s_dut, autils.decorate_event(
-        aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, cc_s_id))
-    asserts.assert_true(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
-                        "Discovery with ranging for CC expected!")
+        # Subscriber: modify CC to in-range - expect discovery w/ range
+        s_dut.droid.wifiAwareUpdateSubscribe(
+            cc_s_id,
+            autils.add_ranging_to_sub(
+                autils.create_discovery_config("CC",
+                                               aconsts.SUBSCRIBE_TYPE_PASSIVE),
+                min_distance_mm=None,
+                max_distance_mm=1000001))
+        autils.wait_for_event(
+            s_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SESSION_CONFIG_UPDATED,
+                                  cc_s_id))
+        event = autils.wait_for_event(
+            s_dut,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SERVICE_DISCOVERED,
+                                  cc_s_id))
+        asserts.assert_true(
+            aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+            "Discovery with ranging for CC expected!")
 
-  @test_tracker_info(uuid="d94dac91-4090-4c03-a867-6dfac6558ba3")
-  def test_role_concurrency(self):
-    """Verify the behavior of Wi-Fi Aware Ranging (in the context of discovery)
+    @test_tracker_info(uuid="d94dac91-4090-4c03-a867-6dfac6558ba3")
+    def test_role_concurrency(self):
+        """Verify the behavior of Wi-Fi Aware Ranging (in the context of discovery)
      when the device has concurrency limitations which do not permit concurrent
      Initiator and Responder roles on the same device. In such case it is
      expected that normal discovery without ranging is executed AND that ranging
@@ -1345,128 +1592,201 @@
      DUT2: Start Publish FF w/ ranging (solicited)
      DUT1: expect FF w/ ranging information - should finally be back up
      """
-    dut1 = self.android_devices[0]
-    dut1.pretty_name = "DUT1"
-    dut2 = self.android_devices[1]
-    dut2.pretty_name = "DUT2"
+        dut1 = self.android_devices[0]
+        dut1.pretty_name = "DUT1"
+        dut2 = self.android_devices[1]
+        dut2.pretty_name = "DUT2"
 
-    # Publisher+Subscriber: attach and wait for confirmation
-    dut1_id = dut1.droid.wifiAwareAttach(False)
-    autils.wait_for_event(dut1, aconsts.EVENT_CB_ON_ATTACHED)
-    time.sleep(self.device_startup_offset)
-    dut2_id = dut2.droid.wifiAwareAttach(False)
-    autils.wait_for_event(dut2, aconsts.EVENT_CB_ON_ATTACHED)
+        # Publisher+Subscriber: attach and wait for confirmation
+        dut1_id = dut1.droid.wifiAwareAttach(False)
+        autils.wait_for_event(dut1, aconsts.EVENT_CB_ON_ATTACHED)
+        time.sleep(self.device_startup_offset)
+        dut2_id = dut2.droid.wifiAwareAttach(False)
+        autils.wait_for_event(dut2, aconsts.EVENT_CB_ON_ATTACHED)
 
-    # DUT1: initial service bringup
-    aa_p_id = dut1.droid.wifiAwarePublish(dut1_id, autils.add_ranging_to_pub(
-        autils.create_discovery_config("AA", aconsts.PUBLISH_TYPE_UNSOLICITED),
-        enable_ranging=True), True)
-    autils.wait_for_event(dut1, autils.decorate_event(
-        aconsts.SESSION_CB_ON_PUBLISH_STARTED, aa_p_id))
-    bb_s_id = dut1.droid.wifiAwareSubscribe(dut1_id, autils.add_ranging_to_sub(
-        autils.create_discovery_config("BB", aconsts.SUBSCRIBE_TYPE_ACTIVE),
-        min_distance_mm=None, max_distance_mm=1000000), True)
-    autils.wait_for_event(dut1, autils.decorate_event(
-        aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, bb_s_id))
-    cc_p_id = dut1.droid.wifiAwarePublish(dut1_id, autils.add_ranging_to_pub(
-        autils.create_discovery_config("CC", aconsts.PUBLISH_TYPE_UNSOLICITED),
-        enable_ranging=True), True)
-    autils.wait_for_event(dut1, autils.decorate_event(
-        aconsts.SESSION_CB_ON_PUBLISH_STARTED, cc_p_id))
-    dd_p_id = dut1.droid.wifiAwarePublish(dut1_id,
-      autils.create_discovery_config("DD", aconsts.PUBLISH_TYPE_SOLICITED),
-                                           True)
-    autils.wait_for_event(dut1, autils.decorate_event(
-        aconsts.SESSION_CB_ON_PUBLISH_STARTED, dd_p_id))
-    ee_s_id = dut1.droid.wifiAwareSubscribe(dut1_id, autils.add_ranging_to_sub(
-        autils.create_discovery_config("EE", aconsts.SUBSCRIBE_TYPE_PASSIVE),
-        min_distance_mm=None, max_distance_mm=1000000), True)
-    autils.wait_for_event(dut1, autils.decorate_event(
-        aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, ee_s_id))
-    ff_s_id = dut1.droid.wifiAwareSubscribe(dut1_id, autils.add_ranging_to_sub(
-        autils.create_discovery_config("FF", aconsts.SUBSCRIBE_TYPE_ACTIVE),
-        min_distance_mm=None, max_distance_mm=1000000), True)
-    autils.wait_for_event(dut1, autils.decorate_event(
-        aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, ff_s_id))
+        # DUT1: initial service bringup
+        aa_p_id = dut1.droid.wifiAwarePublish(
+            dut1_id,
+            autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    "AA", aconsts.PUBLISH_TYPE_UNSOLICITED),
+                enable_ranging=True), True)
+        autils.wait_for_event(
+            dut1,
+            autils.decorate_event(aconsts.SESSION_CB_ON_PUBLISH_STARTED,
+                                  aa_p_id))
+        bb_s_id = dut1.droid.wifiAwareSubscribe(
+            dut1_id,
+            autils.add_ranging_to_sub(
+                autils.create_discovery_config("BB",
+                                               aconsts.SUBSCRIBE_TYPE_ACTIVE),
+                min_distance_mm=None,
+                max_distance_mm=1000000), True)
+        autils.wait_for_event(
+            dut1,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED,
+                                  bb_s_id))
+        cc_p_id = dut1.droid.wifiAwarePublish(
+            dut1_id,
+            autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    "CC", aconsts.PUBLISH_TYPE_UNSOLICITED),
+                enable_ranging=True), True)
+        autils.wait_for_event(
+            dut1,
+            autils.decorate_event(aconsts.SESSION_CB_ON_PUBLISH_STARTED,
+                                  cc_p_id))
+        dd_p_id = dut1.droid.wifiAwarePublish(
+            dut1_id,
+            autils.create_discovery_config(
+                "DD", aconsts.PUBLISH_TYPE_SOLICITED), True)
+        autils.wait_for_event(
+            dut1,
+            autils.decorate_event(aconsts.SESSION_CB_ON_PUBLISH_STARTED,
+                                  dd_p_id))
+        ee_s_id = dut1.droid.wifiAwareSubscribe(
+            dut1_id,
+            autils.add_ranging_to_sub(
+                autils.create_discovery_config("EE",
+                                               aconsts.SUBSCRIBE_TYPE_PASSIVE),
+                min_distance_mm=None,
+                max_distance_mm=1000000), True)
+        autils.wait_for_event(
+            dut1,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED,
+                                  ee_s_id))
+        ff_s_id = dut1.droid.wifiAwareSubscribe(
+            dut1_id,
+            autils.add_ranging_to_sub(
+                autils.create_discovery_config("FF",
+                                               aconsts.SUBSCRIBE_TYPE_ACTIVE),
+                min_distance_mm=None,
+                max_distance_mm=1000000), True)
+        autils.wait_for_event(
+            dut1,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED,
+                                  ff_s_id))
 
-    # DUT2: initial service bringup
-    aa_s_id = dut2.droid.wifiAwareSubscribe(dut2_id, autils.add_ranging_to_sub(
-        autils.create_discovery_config("AA", aconsts.SUBSCRIBE_TYPE_PASSIVE),
-        min_distance_mm=None, max_distance_mm=1000000), True)
-    autils.wait_for_event(dut2, autils.decorate_event(
-        aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, aa_s_id))
-    bb_p_id = dut2.droid.wifiAwarePublish(dut2_id, autils.add_ranging_to_pub(
-        autils.create_discovery_config("BB", aconsts.PUBLISH_TYPE_SOLICITED),
-        enable_ranging=True), True)
-    autils.wait_for_event(dut2, autils.decorate_event(
-        aconsts.SESSION_CB_ON_PUBLISH_STARTED, bb_p_id))
-    dd_s_id = dut2.droid.wifiAwareSubscribe(dut2_id,
-        autils.create_discovery_config("AA", aconsts.SUBSCRIBE_TYPE_ACTIVE),
-        True)
-    autils.wait_for_event(dut2, autils.decorate_event(
-        aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, dd_s_id))
+        # DUT2: initial service bringup
+        aa_s_id = dut2.droid.wifiAwareSubscribe(
+            dut2_id,
+            autils.add_ranging_to_sub(
+                autils.create_discovery_config("AA",
+                                               aconsts.SUBSCRIBE_TYPE_PASSIVE),
+                min_distance_mm=None,
+                max_distance_mm=1000000), True)
+        autils.wait_for_event(
+            dut2,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED,
+                                  aa_s_id))
+        bb_p_id = dut2.droid.wifiAwarePublish(
+            dut2_id,
+            autils.add_ranging_to_pub(
+                autils.create_discovery_config("BB",
+                                               aconsts.PUBLISH_TYPE_SOLICITED),
+                enable_ranging=True), True)
+        autils.wait_for_event(
+            dut2,
+            autils.decorate_event(aconsts.SESSION_CB_ON_PUBLISH_STARTED,
+                                  bb_p_id))
+        dd_s_id = dut2.droid.wifiAwareSubscribe(
+            dut2_id,
+            autils.create_discovery_config(
+                "AA", aconsts.SUBSCRIBE_TYPE_ACTIVE), True)
+        autils.wait_for_event(
+            dut2,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED,
+                                  dd_s_id))
 
-    # Initial set of discovery events for AA, BB, and DD (which are up)
-    event = autils.wait_for_event(dut2, autils.decorate_event(
-        aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, aa_s_id))
-    asserts.assert_true(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
-                        "Discovery with ranging for AA expected!")
-    event = autils.wait_for_event(dut1, autils.decorate_event(
-        aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, bb_s_id))
-    if self.RANGING_INITIATOR_RESPONDER_CONCURRENCY_LIMITATION:
-      asserts.assert_false(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
-                           "Discovery with ranging for BB NOT expected!")
-    else:
-      asserts.assert_true(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
-                           "Discovery with ranging for BB expected!")
-    event = autils.wait_for_event(dut2, autils.decorate_event(
-        aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, dd_s_id))
-    asserts.assert_false(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
-                         "Discovery with ranging for DD NOT expected!")
+        # Initial set of discovery events for AA, BB, and DD (which are up)
+        event = autils.wait_for_event(
+            dut2,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SERVICE_DISCOVERED,
+                                  aa_s_id))
+        asserts.assert_true(
+            aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+            "Discovery with ranging for AA expected!")
+        event = autils.wait_for_event(
+            dut1,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SERVICE_DISCOVERED,
+                                  bb_s_id))
+        if self.RANGING_INITIATOR_RESPONDER_CONCURRENCY_LIMITATION:
+            asserts.assert_false(
+                aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+                "Discovery with ranging for BB NOT expected!")
+        else:
+            asserts.assert_true(
+                aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+                "Discovery with ranging for BB expected!")
+        event = autils.wait_for_event(
+            dut2,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SERVICE_DISCOVERED,
+                                  dd_s_id))
+        asserts.assert_false(
+            aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+            "Discovery with ranging for DD NOT expected!")
 
-    # DUT1/DUT2: terminate AA
-    dut1.droid.wifiAwareDestroyDiscoverySession(aa_p_id)
-    dut2.droid.wifiAwareDestroyDiscoverySession(aa_s_id)
+        # DUT1/DUT2: terminate AA
+        dut1.droid.wifiAwareDestroyDiscoverySession(aa_p_id)
+        dut2.droid.wifiAwareDestroyDiscoverySession(aa_s_id)
 
-    time.sleep(5) # guarantee that session terminated (and host recovered?)
+        time.sleep(
+            5)  # guarantee that session terminated (and host recovered?)
 
-    # DUT2: try EE service - ranging still disabled
-    ee_p_id = dut2.droid.wifiAwarePublish(dut2_id, autils.add_ranging_to_pub(
-        autils.create_discovery_config("EE", aconsts.PUBLISH_TYPE_UNSOLICITED),
-        enable_ranging=True), True)
-    autils.wait_for_event(dut2, autils.decorate_event(
-        aconsts.SESSION_CB_ON_PUBLISH_STARTED, ee_p_id))
+        # DUT2: try EE service - ranging still disabled
+        ee_p_id = dut2.droid.wifiAwarePublish(
+            dut2_id,
+            autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    "EE", aconsts.PUBLISH_TYPE_UNSOLICITED),
+                enable_ranging=True), True)
+        autils.wait_for_event(
+            dut2,
+            autils.decorate_event(aconsts.SESSION_CB_ON_PUBLISH_STARTED,
+                                  ee_p_id))
 
-    event = autils.wait_for_event(dut1, autils.decorate_event(
-        aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, ee_s_id))
-    if self.RANGING_INITIATOR_RESPONDER_CONCURRENCY_LIMITATION:
-      asserts.assert_false(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
-                           "Discovery with ranging for EE NOT expected!")
-    else:
-      asserts.assert_true(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
-                          "Discovery with ranging for EE expected!")
+        event = autils.wait_for_event(
+            dut1,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SERVICE_DISCOVERED,
+                                  ee_s_id))
+        if self.RANGING_INITIATOR_RESPONDER_CONCURRENCY_LIMITATION:
+            asserts.assert_false(
+                aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+                "Discovery with ranging for EE NOT expected!")
+        else:
+            asserts.assert_true(
+                aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+                "Discovery with ranging for EE expected!")
 
-    # DUT1: terminate CC - last publish w/ ranging on DUT!
-    dut1.droid.wifiAwareDestroyDiscoverySession(cc_p_id)
+        # DUT1: terminate CC - last publish w/ ranging on DUT!
+        dut1.droid.wifiAwareDestroyDiscoverySession(cc_p_id)
 
-    time.sleep(5) # guarantee that session terminated (and host recovered?)
+        time.sleep(
+            5)  # guarantee that session terminated (and host recovered?)
 
-    # DUT2: try FF service - ranging should now function
-    ff_p_id = dut2.droid.wifiAwarePublish(dut2_id, autils.add_ranging_to_pub(
-        autils.create_discovery_config("FF", aconsts.PUBLISH_TYPE_SOLICITED),
-        enable_ranging=True), True)
-    autils.wait_for_event(dut2, autils.decorate_event(
-        aconsts.SESSION_CB_ON_PUBLISH_STARTED, ff_p_id))
+        # DUT2: try FF service - ranging should now function
+        ff_p_id = dut2.droid.wifiAwarePublish(
+            dut2_id,
+            autils.add_ranging_to_pub(
+                autils.create_discovery_config("FF",
+                                               aconsts.PUBLISH_TYPE_SOLICITED),
+                enable_ranging=True), True)
+        autils.wait_for_event(
+            dut2,
+            autils.decorate_event(aconsts.SESSION_CB_ON_PUBLISH_STARTED,
+                                  ff_p_id))
 
-    event = autils.wait_for_event(dut1, autils.decorate_event(
-        aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, ff_s_id))
-    asserts.assert_true(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
-                        "Discovery with ranging for FF expected!")
+        event = autils.wait_for_event(
+            dut1,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SERVICE_DISCOVERED,
+                                  ff_s_id))
+        asserts.assert_true(
+            aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+            "Discovery with ranging for FF expected!")
 
-
-  @test_tracker_info(uuid="6700eab8-a172-43cd-aed3-e6577ce8fd89")
-  def test_discovery_direct_concurrency(self):
-    """Verify the behavior of Wi-Fi Aware Ranging used as part of discovery and
+    @test_tracker_info(uuid="6700eab8-a172-43cd-aed3-e6577ce8fd89")
+    def test_discovery_direct_concurrency(self):
+        """Verify the behavior of Wi-Fi Aware Ranging used as part of discovery and
     as direct ranging to a peer device.
 
     Process:
@@ -1477,91 +1797,136 @@
     - Keep performing direct Ranging in context of YYY
     - Stop direct Ranging and look for XXX to discover
     """
-    dut1 = self.android_devices[0]
-    dut1.pretty_name = "DUT1"
-    dut2 = self.android_devices[1]
-    dut2.pretty_name = "DUT2"
+        dut1 = self.android_devices[0]
+        dut1.pretty_name = "DUT1"
+        dut2 = self.android_devices[1]
+        dut2.pretty_name = "DUT2"
 
-    # DUTs: attach and wait for confirmation
-    dut1_id = dut1.droid.wifiAwareAttach(False)
-    autils.wait_for_event(dut1, aconsts.EVENT_CB_ON_ATTACHED)
-    time.sleep(self.device_startup_offset)
-    dut2_id = dut2.droid.wifiAwareAttach(True)
-    event = autils.wait_for_event(dut2, aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
-    dut2_mac = event['data']['mac']
+        # DUTs: attach and wait for confirmation
+        dut1_id = dut1.droid.wifiAwareAttach(False)
+        autils.wait_for_event(dut1, aconsts.EVENT_CB_ON_ATTACHED)
+        time.sleep(self.device_startup_offset)
+        dut2_id = dut2.droid.wifiAwareAttach(True)
+        event = autils.wait_for_event(dut2,
+                                      aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
+        dut2_mac = event['data']['mac']
 
-    # DUT1: publishers bring-up
-    xxx_p_id = dut1.droid.wifiAwarePublish(dut1_id, autils.add_ranging_to_pub(
-      autils.create_discovery_config("XXX", aconsts.PUBLISH_TYPE_UNSOLICITED),
-      enable_ranging=True), True)
-    autils.wait_for_event(dut1, autils.decorate_event(
-      aconsts.SESSION_CB_ON_PUBLISH_STARTED, xxx_p_id))
-    yyy_p_id = dut1.droid.wifiAwarePublish(dut1_id, autils.add_ranging_to_pub(
-        autils.create_discovery_config("YYY", aconsts.PUBLISH_TYPE_UNSOLICITED),
-        enable_ranging=True), True)
-    autils.wait_for_event(dut1, autils.decorate_event(
-        aconsts.SESSION_CB_ON_PUBLISH_STARTED, yyy_p_id))
+        # DUT1: publishers bring-up
+        xxx_p_id = dut1.droid.wifiAwarePublish(
+            dut1_id,
+            autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    "XXX", aconsts.PUBLISH_TYPE_UNSOLICITED),
+                enable_ranging=True), True)
+        autils.wait_for_event(
+            dut1,
+            autils.decorate_event(aconsts.SESSION_CB_ON_PUBLISH_STARTED,
+                                  xxx_p_id))
+        yyy_p_id = dut1.droid.wifiAwarePublish(
+            dut1_id,
+            autils.add_ranging_to_pub(
+                autils.create_discovery_config(
+                    "YYY", aconsts.PUBLISH_TYPE_UNSOLICITED),
+                enable_ranging=True), True)
+        autils.wait_for_event(
+            dut1,
+            autils.decorate_event(aconsts.SESSION_CB_ON_PUBLISH_STARTED,
+                                  yyy_p_id))
 
-    # DUT2: subscribers bring-up
-    xxx_s_id = dut2.droid.wifiAwareSubscribe(dut2_id, autils.add_ranging_to_sub(
-      autils.create_discovery_config("XXX", aconsts.SUBSCRIBE_TYPE_PASSIVE),
-      min_distance_mm=1000000, max_distance_mm=1000001), True)
-    autils.wait_for_event(dut2, autils.decorate_event(
-      aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, xxx_s_id))
-    yyy_s_id = dut2.droid.wifiAwareSubscribe(dut2_id, autils.add_ranging_to_sub(
-        autils.create_discovery_config("YYY", aconsts.SUBSCRIBE_TYPE_PASSIVE),
-        min_distance_mm=None, max_distance_mm=1000000), True)
-    autils.wait_for_event(dut2, autils.decorate_event(
-        aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED, yyy_s_id))
+        # DUT2: subscribers bring-up
+        xxx_s_id = dut2.droid.wifiAwareSubscribe(
+            dut2_id,
+            autils.add_ranging_to_sub(
+                autils.create_discovery_config("XXX",
+                                               aconsts.SUBSCRIBE_TYPE_PASSIVE),
+                min_distance_mm=1000000,
+                max_distance_mm=1000001), True)
+        autils.wait_for_event(
+            dut2,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED,
+                                  xxx_s_id))
+        yyy_s_id = dut2.droid.wifiAwareSubscribe(
+            dut2_id,
+            autils.add_ranging_to_sub(
+                autils.create_discovery_config("YYY",
+                                               aconsts.SUBSCRIBE_TYPE_PASSIVE),
+                min_distance_mm=None,
+                max_distance_mm=1000000), True)
+        autils.wait_for_event(
+            dut2,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED,
+                                  yyy_s_id))
 
-    # Service discovery: YYY (with range info), but no XXX
-    event = autils.wait_for_event(dut2, autils.decorate_event(
-      aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, yyy_s_id))
-    asserts.assert_true(aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
-                        "Discovery with ranging for YYY expected!")
-    yyy_peer_id_on_sub = event['data'][aconsts.SESSION_CB_KEY_PEER_ID]
+        # Service discovery: YYY (with range info), but no XXX
+        event = autils.wait_for_event(
+            dut2,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SERVICE_DISCOVERED,
+                                  yyy_s_id))
+        asserts.assert_true(
+            aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
+            "Discovery with ranging for YYY expected!")
+        yyy_peer_id_on_sub = event['data'][aconsts.SESSION_CB_KEY_PEER_ID]
 
-    autils.fail_on_event(dut2, autils.decorate_event(
-      aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, xxx_s_id))
+        autils.fail_on_event(
+            dut2,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SERVICE_DISCOVERED,
+                                  xxx_s_id))
 
-    # Direct ranging
-    results21 = []
-    for iter in range(10):
-      id = dut2.droid.wifiRttStartRangingToAwarePeerId(yyy_peer_id_on_sub)
-      event = autils.wait_for_event(dut2, rutils.decorate_event(
-        rconsts.EVENT_CB_RANGING_ON_RESULT, id))
-      results21.append(event["data"][rconsts.EVENT_CB_RANGING_KEY_RESULTS][0])
+        # Direct ranging
+        results21 = []
+        for iter in range(10):
+            id = dut2.droid.wifiRttStartRangingToAwarePeerId(
+                yyy_peer_id_on_sub)
+            event = autils.wait_for_event(
+                dut2,
+                rutils.decorate_event(rconsts.EVENT_CB_RANGING_ON_RESULT, id))
+            results21.append(
+                event["data"][rconsts.EVENT_CB_RANGING_KEY_RESULTS][0])
 
-    time.sleep(5) # while switching roles
+        time.sleep(5)  # while switching roles
 
-    results12 = []
-    for iter in range(10):
-      id = dut1.droid.wifiRttStartRangingToAwarePeerMac(dut2_mac)
-      event = autils.wait_for_event(dut1, rutils.decorate_event(
-        rconsts.EVENT_CB_RANGING_ON_RESULT, id))
-      results12.append(event["data"][rconsts.EVENT_CB_RANGING_KEY_RESULTS][0])
+        results12 = []
+        for iter in range(10):
+            id = dut1.droid.wifiRttStartRangingToAwarePeerMac(dut2_mac)
+            event = autils.wait_for_event(
+                dut1,
+                rutils.decorate_event(rconsts.EVENT_CB_RANGING_ON_RESULT, id))
+            results12.append(
+                event["data"][rconsts.EVENT_CB_RANGING_KEY_RESULTS][0])
 
-    stats = [rutils.extract_stats(results12, 0, 0, 0),
-             rutils.extract_stats(results21, 0, 0, 0)]
+        stats = [
+            rutils.extract_stats(results12, 0, 0, 0),
+            rutils.extract_stats(results21, 0, 0, 0)
+        ]
 
-    # Update XXX to be within range
-    dut2.droid.wifiAwareUpdateSubscribe(xxx_s_id, autils.add_ranging_to_sub(
-      autils.create_discovery_config("XXX", aconsts.SUBSCRIBE_TYPE_PASSIVE),
-      min_distance_mm=None, max_distance_mm=1000000))
-    autils.wait_for_event(dut2, autils.decorate_event(
-      aconsts.SESSION_CB_ON_SESSION_CONFIG_UPDATED, xxx_s_id))
+        # Update XXX to be within range
+        dut2.droid.wifiAwareUpdateSubscribe(
+            xxx_s_id,
+            autils.add_ranging_to_sub(
+                autils.create_discovery_config("XXX",
+                                               aconsts.SUBSCRIBE_TYPE_PASSIVE),
+                min_distance_mm=None,
+                max_distance_mm=1000000))
+        autils.wait_for_event(
+            dut2,
+            autils.decorate_event(aconsts.SESSION_CB_ON_SESSION_CONFIG_UPDATED,
+                                  xxx_s_id))
 
-    # Expect discovery on XXX - wait until discovery with ranging:
-    # - 0 or more: without ranging info (due to concurrency limitations)
-    # - 1 or more: with ranging (once concurrency limitation relieved)
-    num_events = 0
-    while True:
-      event = autils.wait_for_event(dut2, autils.decorate_event(
-          aconsts.SESSION_CB_ON_SERVICE_DISCOVERED, xxx_s_id))
-      if aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"]:
-        break
-      num_events = num_events + 1
-      asserts.assert_true(num_events < 10, # arbitrary safety valve
-                          "Way too many discovery events without ranging!")
+        # Expect discovery on XXX - wait until discovery with ranging:
+        # - 0 or more: without ranging info (due to concurrency limitations)
+        # - 1 or more: with ranging (once concurrency limitation relieved)
+        num_events = 0
+        while True:
+            event = autils.wait_for_event(
+                dut2,
+                autils.decorate_event(aconsts.SESSION_CB_ON_SERVICE_DISCOVERED,
+                                      xxx_s_id))
+            if aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"]:
+                break
+            num_events = num_events + 1
+            asserts.assert_true(
+                num_events < 10,  # arbitrary safety valve
+                "Way too many discovery events without ranging!")
 
-    asserts.explicit_pass("Discovery/Direct RTT Concurrency Pass", extras=stats)
\ No newline at end of file
+        asserts.explicit_pass(
+            "Discovery/Direct RTT Concurrency Pass", extras={"data": stats})
diff --git a/acts/tests/google/wifi/rtt/functional/RangeApMiscTest.py b/acts/tests/google/wifi/rtt/functional/RangeApMiscTest.py
index dd5560d..b265f00 100644
--- a/acts/tests/google/wifi/rtt/functional/RangeApMiscTest.py
+++ b/acts/tests/google/wifi/rtt/functional/RangeApMiscTest.py
@@ -22,64 +22,72 @@
 
 
 class RangeApMiscTest(RttBaseTest):
-  """Test class for RTT ranging to Access Points - miscellaneous tests which
+    """Test class for RTT ranging to Access Points - miscellaneous tests which
   do not fit into the strict IEEE 802.11mc supporting or non-supporting test
   beds - e.g. a mixed test."""
 
-  # Number of RTT iterations
-  NUM_ITER = 10
+    # Number of RTT iterations
+    NUM_ITER = 10
 
-  # Time gap (in seconds) between iterations
-  TIME_BETWEEN_ITERATIONS = 0
+    # Time gap (in seconds) between iterations
+    TIME_BETWEEN_ITERATIONS = 0
 
-  def __init__(self, controllers):
-    RttBaseTest.__init__(self, controllers)
+    def __init__(self, controllers):
+        RttBaseTest.__init__(self, controllers)
 
-  #############################################################################
+    #############################################################################
 
-  def test_rtt_mixed_80211mc_supporting_aps_wo_privilege(self):
-    """Scan for APs and perform RTT on one supporting and one non-supporting
+    def test_rtt_mixed_80211mc_supporting_aps_wo_privilege(self):
+        """Scan for APs and perform RTT on one supporting and one non-supporting
     IEEE 802.11mc APs with the device not having privilege access (expect
     failures)."""
-    dut = self.android_devices[0]
-    rutils.config_privilege_override(dut, True)
-    rtt_aps = rutils.scan_with_rtt_support_constraint(dut, True)
-    non_rtt_aps = rutils.scan_with_rtt_support_constraint(dut, False)
-    mix_list = [rtt_aps[0], non_rtt_aps[0]]
-    dut.log.debug("Visible non-IEEE 802.11mc APs=%s", mix_list)
-    events = rutils.run_ranging(dut, mix_list, self.NUM_ITER,
-                                self.TIME_BETWEEN_ITERATIONS)
-    stats = rutils.analyze_results(events, self.rtt_reference_distance_mm,
-                                   self.rtt_reference_distance_margin_mm,
-                                   self.rtt_min_expected_rssi_dbm,
-                                   self.lci_reference, self.lcr_reference)
-    dut.log.debug("Stats=%s", stats)
+        dut = self.android_devices[0]
+        rutils.config_privilege_override(dut, True)
+        rtt_aps = rutils.scan_with_rtt_support_constraint(dut, True)
+        non_rtt_aps = rutils.scan_with_rtt_support_constraint(dut, False)
+        mix_list = [rtt_aps[0], non_rtt_aps[0]]
+        dut.log.debug("Visible non-IEEE 802.11mc APs=%s", mix_list)
+        events = rutils.run_ranging(dut, mix_list, self.NUM_ITER,
+                                    self.TIME_BETWEEN_ITERATIONS)
+        stats = rutils.analyze_results(events, self.rtt_reference_distance_mm,
+                                       self.rtt_reference_distance_margin_mm,
+                                       self.rtt_min_expected_rssi_dbm,
+                                       self.lci_reference, self.lcr_reference)
+        dut.log.debug("Stats=%s", stats)
 
-    for bssid, stat in stats.items():
-      asserts.assert_true(stat['num_no_results'] == 0,
-                          "Missing (timed-out) results", extras=stats)
-      if bssid == rtt_aps[0][wutils.WifiEnums.BSSID_KEY]:
-        asserts.assert_false(stat['any_lci_mismatch'],
-                             "LCI mismatch", extras=stats)
-        asserts.assert_false(stat['any_lcr_mismatch'],
-                             "LCR mismatch", extras=stats)
-        asserts.assert_equal(stat['num_invalid_rssi'], 0, "Invalid RSSI",
-                            extras=stats)
-        asserts.assert_true(stat['num_failures'] <=
-                            self.rtt_max_failure_rate_two_sided_rtt_percentage
-                            * stat['num_results'] / 100,
-                            "Failure rate is too high", extras=stats)
-        asserts.assert_true(stat['num_range_out_of_margin'] <=
+        for bssid, stat in stats.items():
+            asserts.assert_true(
+                stat['num_no_results'] == 0,
+                "Missing (timed-out) results",
+                extras=stats)
+            if bssid == rtt_aps[0][wutils.WifiEnums.BSSID_KEY]:
+                asserts.assert_false(
+                    stat['any_lci_mismatch'], "LCI mismatch", extras=stats)
+                asserts.assert_false(
+                    stat['any_lcr_mismatch'], "LCR mismatch", extras=stats)
+                asserts.assert_equal(
+                    stat['num_invalid_rssi'], 0, "Invalid RSSI", extras=stats)
+                asserts.assert_true(
+                    stat['num_failures'] <=
+                    self.rtt_max_failure_rate_two_sided_rtt_percentage *
+                    stat['num_results'] / 100,
+                    "Failure rate is too high",
+                    extras=stats)
+                asserts.assert_true(
+                    stat['num_range_out_of_margin'] <=
                     self.rtt_max_margin_exceeded_rate_two_sided_rtt_percentage
                     * stat['num_success_results'] / 100,
                     "Results exceeding error margin rate is too high",
                     extras=stats)
-      else:
-        asserts.assert_true(stat['num_failures'] == self.NUM_ITER,
-        "All one-sided RTT requests must fail when executed without privilege",
-                            extras=stats)
-        for code in stat['status_codes']:
-          asserts.assert_true(code ==
-            rconsts.EVENT_CB_RANGING_STATUS_RESPONDER_DOES_NOT_SUPPORT_IEEE80211MC,
-                              "Expected non-support error code", extras=stats)
-    asserts.explicit_pass("RTT test done", extras=stats)
+            else:
+                asserts.assert_true(
+                    stat['num_failures'] == self.NUM_ITER,
+                    "All one-sided RTT requests must fail when executed without privilege",
+                    extras=stats)
+                for code in stat['status_codes']:
+                    asserts.assert_true(
+                        code == rconsts.
+                        EVENT_CB_RANGING_STATUS_RESPONDER_DOES_NOT_SUPPORT_IEEE80211MC,
+                        "Expected non-support error code",
+                        extras=stats)
+        asserts.explicit_pass("RTT test done", extras=stats)
diff --git a/acts/tests/google/wifi/rtt/functional/RangeApNonSupporting11McTest.py b/acts/tests/google/wifi/rtt/functional/RangeApNonSupporting11McTest.py
index 65b67d2..b87a4ad 100644
--- a/acts/tests/google/wifi/rtt/functional/RangeApNonSupporting11McTest.py
+++ b/acts/tests/google/wifi/rtt/functional/RangeApNonSupporting11McTest.py
@@ -23,115 +23,131 @@
 
 
 class RangeApNonSupporting11McTest(WifiBaseTest, RttBaseTest):
-  """Test class for RTT ranging to Access Points which do not support IEEE
+    """Test class for RTT ranging to Access Points which do not support IEEE
   802.11mc"""
 
-  # Number of RTT iterations
-  NUM_ITER = 10
+    # Number of RTT iterations
+    NUM_ITER = 10
 
-  # Time gap (in seconds) between iterations
-  TIME_BETWEEN_ITERATIONS = 0
+    # Time gap (in seconds) between iterations
+    TIME_BETWEEN_ITERATIONS = 0
 
-  def __init__(self, controllers):
-    WifiBaseTest.__init__(self, controllers)
-    RttBaseTest.__init__(self, controllers)
-    if "AccessPoint" in self.user_params:
-      self.legacy_configure_ap_and_start()
+    def __init__(self, controllers):
+        WifiBaseTest.__init__(self, controllers)
+        RttBaseTest.__init__(self, controllers)
+        if "AccessPoint" in self.user_params:
+            self.legacy_configure_ap_and_start()
 
-  #############################################################################
+    #############################################################################
 
-  @test_tracker_info(uuid="cde756e9-11f3-43da-b9ae-9edf85764f82")
-  def test_rtt_non_80211mc_supporting_aps(self):
-    """Scan for APs and perform RTT on non-IEEE 802.11mc supporting APs"""
-    dut = self.android_devices[0]
-    non_rtt_aps = rutils.select_best_scan_results(
-      rutils.scan_with_rtt_support_constraint(dut, False), select_count=1)
-    dut.log.debug("Visible non-IEEE 802.11mc APs=%s", non_rtt_aps)
-    asserts.assert_true(len(non_rtt_aps) > 0, "Need at least one AP!")
-    events = rutils.run_ranging(dut, non_rtt_aps, self.NUM_ITER,
-                                self.TIME_BETWEEN_ITERATIONS)
-    stats = rutils.analyze_results(events, self.rtt_reference_distance_mm,
-                                   self.rtt_reference_distance_margin_mm,
-                                   self.rtt_min_expected_rssi_dbm,
-                                   self.lci_reference, self.lcr_reference)
-    dut.log.debug("Stats=%s", stats)
+    @test_tracker_info(uuid="cde756e9-11f3-43da-b9ae-9edf85764f82")
+    def test_rtt_non_80211mc_supporting_aps(self):
+        """Scan for APs and perform RTT on non-IEEE 802.11mc supporting APs"""
+        dut = self.android_devices[0]
+        non_rtt_aps = rutils.select_best_scan_results(
+            rutils.scan_with_rtt_support_constraint(dut, False),
+            select_count=1)
+        dut.log.debug("Visible non-IEEE 802.11mc APs=%s", non_rtt_aps)
+        asserts.assert_true(len(non_rtt_aps) > 0, "Need at least one AP!")
+        events = rutils.run_ranging(dut, non_rtt_aps, self.NUM_ITER,
+                                    self.TIME_BETWEEN_ITERATIONS)
+        stats = rutils.analyze_results(events, self.rtt_reference_distance_mm,
+                                       self.rtt_reference_distance_margin_mm,
+                                       self.rtt_min_expected_rssi_dbm,
+                                       self.lci_reference, self.lcr_reference)
+        dut.log.debug("Stats=%s", stats)
 
-    for bssid, stat in stats.items():
-      asserts.assert_true(stat['num_no_results'] == 0,
-                          "Missing (timed-out) results", extras=stats)
-      asserts.assert_false(stat['any_lci_mismatch'],
-                           "LCI mismatch", extras=stats)
-      asserts.assert_false(stat['any_lcr_mismatch'],
-                           "LCR mismatch", extras=stats)
-      asserts.assert_equal(stat['num_invalid_rssi'], 0, "Invalid RSSI",
-                          extras=stats)
-      asserts.assert_true(stat['num_failures'] <=
-                          self.rtt_max_failure_rate_one_sided_rtt_percentage
-                          * stat['num_results'] / 100,
-                          "Failure rate is too high", extras=stats)
-      asserts.assert_true(stat['num_range_out_of_margin'] <=
-                self.rtt_max_margin_exceeded_rate_one_sided_rtt_percentage
-                          * stat['num_success_results'] / 100,
+        for bssid, stat in stats.items():
+            asserts.assert_true(
+                stat['num_no_results'] == 0,
+                "Missing (timed-out) results",
+                extras=stats)
+            asserts.assert_false(
+                stat['any_lci_mismatch'], "LCI mismatch", extras=stats)
+            asserts.assert_false(
+                stat['any_lcr_mismatch'], "LCR mismatch", extras=stats)
+            asserts.assert_equal(
+                stat['num_invalid_rssi'], 0, "Invalid RSSI", extras=stats)
+            asserts.assert_true(
+                stat['num_failures'] <=
+                self.rtt_max_failure_rate_one_sided_rtt_percentage *
+                stat['num_results'] / 100,
+                "Failure rate is too high",
+                extras=stats)
+            asserts.assert_true(
+                stat['num_range_out_of_margin'] <=
+                self.rtt_max_margin_exceeded_rate_one_sided_rtt_percentage *
+                stat['num_success_results'] / 100,
                 "Results exceeding error margin rate is too high",
                 extras=stats)
-    asserts.explicit_pass("RTT test done", extras=stats)
+        asserts.explicit_pass("RTT test done", extras=stats)
 
-  @test_tracker_info(uuid="c9e22185-16d4-4fe6-894f-5823587b3288")
-  def test_rtt_non_80211mc_supporting_aps_wo_privilege(self):
-    """Scan for APs and perform RTT on non-IEEE 802.11mc supporting APs with the
+    @test_tracker_info(uuid="c9e22185-16d4-4fe6-894f-5823587b3288")
+    def test_rtt_non_80211mc_supporting_aps_wo_privilege(self):
+        """Scan for APs and perform RTT on non-IEEE 802.11mc supporting APs with the
     device not having privilege access (expect failures)."""
-    dut = self.android_devices[0]
-    rutils.config_privilege_override(dut, True)
-    non_rtt_aps = rutils.select_best_scan_results(
-      rutils.scan_with_rtt_support_constraint(dut, False), select_count=1)
-    dut.log.debug("Visible non-IEEE 802.11mc APs=%s", non_rtt_aps)
-    asserts.assert_true(len(non_rtt_aps) > 0, "Need at least one AP!")
-    events = rutils.run_ranging(dut, non_rtt_aps, self.NUM_ITER,
-                                self.TIME_BETWEEN_ITERATIONS)
-    stats = rutils.analyze_results(events, self.rtt_reference_distance_mm,
-                                   self.rtt_reference_distance_margin_mm,
-                                   self.rtt_min_expected_rssi_dbm,
-                                   self.lci_reference, self.lcr_reference)
-    dut.log.debug("Stats=%s", stats)
+        dut = self.android_devices[0]
+        rutils.config_privilege_override(dut, True)
+        non_rtt_aps = rutils.select_best_scan_results(
+            rutils.scan_with_rtt_support_constraint(dut, False),
+            select_count=1)
+        dut.log.debug("Visible non-IEEE 802.11mc APs=%s", non_rtt_aps)
+        asserts.assert_true(len(non_rtt_aps) > 0, "Need at least one AP!")
+        events = rutils.run_ranging(dut, non_rtt_aps, self.NUM_ITER,
+                                    self.TIME_BETWEEN_ITERATIONS)
+        stats = rutils.analyze_results(events, self.rtt_reference_distance_mm,
+                                       self.rtt_reference_distance_margin_mm,
+                                       self.rtt_min_expected_rssi_dbm,
+                                       self.lci_reference, self.lcr_reference)
+        dut.log.debug("Stats=%s", stats)
 
-    for bssid, stat in stats.items():
-      asserts.assert_true(stat['num_no_results'] == 0,
-                          "Missing (timed-out) results", extras=stats)
-      asserts.assert_true(stat['num_failures'] == self.NUM_ITER,
-        "All one-sided RTT requests must fail when executed without privilege",
-                          extras=stats)
-      for code in stat['status_codes']:
-        asserts.assert_true(code ==
-        rconsts.EVENT_CB_RANGING_STATUS_RESPONDER_DOES_NOT_SUPPORT_IEEE80211MC,
-                            "Expected non-support error code", extras=stats)
-    asserts.explicit_pass("RTT test done", extras=stats)
+        for bssid, stat in stats.items():
+            asserts.assert_true(
+                stat['num_no_results'] == 0,
+                "Missing (timed-out) results",
+                extras=stats)
+            asserts.assert_true(
+                stat['num_failures'] == self.NUM_ITER,
+                "All one-sided RTT requests must fail when executed without privilege",
+                extras=stats)
+            for code in stat['status_codes']:
+                asserts.assert_true(
+                    code == rconsts.
+                    EVENT_CB_RANGING_STATUS_RESPONDER_DOES_NOT_SUPPORT_IEEE80211MC,
+                    "Expected non-support error code",
+                    extras=stats)
+        asserts.explicit_pass("RTT test done", extras=stats)
 
-  @test_tracker_info(uuid="e117af56-bd3f-40ae-a2fd-4175f0daa7fa")
-  def test_rtt_non_80211mc_supporting_ap_faked_as_supporting(self):
-    """Scan for APs which do not support IEEE 802.11mc, maliciously modify the
+    @test_tracker_info(uuid="e117af56-bd3f-40ae-a2fd-4175f0daa7fa")
+    def test_rtt_non_80211mc_supporting_ap_faked_as_supporting(self):
+        """Scan for APs which do not support IEEE 802.11mc, maliciously modify the
     Responder config to indicate support and pass-through to service. Verify
     that get an error result.
     """
-    dut = self.android_devices[0]
-    non_rtt_aps = rutils.select_best_scan_results(
-      rutils.scan_with_rtt_support_constraint(dut, False), select_count=1)
-    dut.log.debug("Visible non-IEEE 802.11mc APs=%s", non_rtt_aps)
-    asserts.assert_true(len(non_rtt_aps) > 0, "Need at least one AP!")
-    non_rtt_aps = non_rtt_aps[0:1] # pick first
-    non_rtt_aps[0][rconsts.SCAN_RESULT_KEY_RTT_RESPONDER] = True # falsify
-    dut.log.debug("Visible non-IEEE 802.11mc APs=%s", non_rtt_aps)
-    events = rutils.run_ranging(dut, non_rtt_aps, self.NUM_ITER,
-                                self.TIME_BETWEEN_ITERATIONS)
-    stats = rutils.analyze_results(events, self.rtt_reference_distance_mm,
-                                   self.rtt_reference_distance_margin_mm,
-                                   self.rtt_min_expected_rssi_dbm,
-                                   self.lci_reference, self.lcr_reference)
-    dut.log.debug("Stats=%s", stats)
+        dut = self.android_devices[0]
+        non_rtt_aps = rutils.select_best_scan_results(
+            rutils.scan_with_rtt_support_constraint(dut, False),
+            select_count=1)
+        dut.log.debug("Visible non-IEEE 802.11mc APs=%s", non_rtt_aps)
+        asserts.assert_true(len(non_rtt_aps) > 0, "Need at least one AP!")
+        non_rtt_aps = non_rtt_aps[0:1]  # pick first
+        non_rtt_aps[0][rconsts.SCAN_RESULT_KEY_RTT_RESPONDER] = True  # falsify
+        dut.log.debug("Visible non-IEEE 802.11mc APs=%s", non_rtt_aps)
+        events = rutils.run_ranging(dut, non_rtt_aps, self.NUM_ITER,
+                                    self.TIME_BETWEEN_ITERATIONS)
+        stats = rutils.analyze_results(events, self.rtt_reference_distance_mm,
+                                       self.rtt_reference_distance_margin_mm,
+                                       self.rtt_min_expected_rssi_dbm,
+                                       self.lci_reference, self.lcr_reference)
+        dut.log.debug("Stats=%s", stats)
 
-    for bssid, stat in stats.items():
-      asserts.assert_true(stat['num_no_results'] == 0,
-                          "Missing (timed-out) results", extras=stats)
-      asserts.assert_true(stat['num_failures'] == self.NUM_ITER,
-                          "Failures expected for falsified responder config",
-                          extras=stats)
-    asserts.explicit_pass("RTT test done", extras=stats)
+        for bssid, stat in stats.items():
+            asserts.assert_true(
+                stat['num_no_results'] == 0,
+                "Missing (timed-out) results",
+                extras=stats)
+            asserts.assert_true(
+                stat['num_failures'] == self.NUM_ITER,
+                "Failures expected for falsified responder config",
+                extras=stats)
+        asserts.explicit_pass("RTT test done", extras=stats)
diff --git a/acts/tests/google/wifi/rtt/functional/RangeApSupporting11McTest.py b/acts/tests/google/wifi/rtt/functional/RangeApSupporting11McTest.py
index d889a22..3ec2326 100644
--- a/acts/tests/google/wifi/rtt/functional/RangeApSupporting11McTest.py
+++ b/acts/tests/google/wifi/rtt/functional/RangeApSupporting11McTest.py
@@ -15,9 +15,11 @@
 #   limitations under the License.
 
 import queue
+import time
 
 from acts import asserts
 from acts.test_decorators import test_tracker_info
+from acts.test_utils.tel.tel_test_utils import WIFI_CONFIG_APBAND_5G
 from acts.test_utils.wifi import wifi_test_utils as wutils
 from acts.test_utils.wifi.rtt import rtt_const as rconsts
 from acts.test_utils.wifi.rtt import rtt_test_utils as rutils
@@ -25,163 +27,288 @@
 
 
 class RangeApSupporting11McTest(RttBaseTest):
-  """Test class for RTT ranging to Access Points which support IEEE 802.11mc"""
+    """Test class for RTT ranging to Access Points which support IEEE 802.11mc"""
 
-  # Number of RTT iterations
-  NUM_ITER = 10
+    # Number of RTT iterations
+    NUM_ITER = 10
 
-  # Time gap (in seconds) between iterations
-  TIME_BETWEEN_ITERATIONS = 0
+    # Time gap (in seconds) between iterations
+    TIME_BETWEEN_ITERATIONS = 0
 
-  def __init__(self, controllers):
-    RttBaseTest.__init__(self, controllers)
+    # Soft AP SSID
+    SOFT_AP_SSID = "RTT_TEST_SSID"
 
-  #############################################################################
+    # Soft AP Password (irrelevant)
+    SOFT_AP_PASSWORD = "ABCDEFGH"
 
-  @test_tracker_info(uuid="6705270f-924b-4bef-b50a-0f0a7eb9ce52")
-  def test_rtt_80211mc_supporting_aps(self):
-    """Scan for APs and perform RTT only to those which support 802.11mc"""
-    dut = self.android_devices[0]
-    rtt_supporting_aps = rutils.select_best_scan_results(
-      rutils.scan_with_rtt_support_constraint(dut, True, repeat=10),
-      select_count=2)
-    dut.log.debug("RTT Supporting APs=%s", rtt_supporting_aps)
-    events = rutils.run_ranging(dut, rtt_supporting_aps, self.NUM_ITER,
-                                self.TIME_BETWEEN_ITERATIONS)
-    stats = rutils.analyze_results(events, self.rtt_reference_distance_mm,
-                                   self.rtt_reference_distance_margin_mm,
-                                   self.rtt_min_expected_rssi_dbm,
-                                   self.lci_reference, self.lcr_reference)
-    dut.log.debug("Stats=%s", stats)
+    # Time to wait before configuration changes
+    WAIT_FOR_CONFIG_CHANGES_SEC = 1
 
-    for bssid, stat in stats.items():
-      asserts.assert_true(stat['num_no_results'] == 0,
-                          "Missing (timed-out) results", extras=stats)
-      asserts.assert_false(stat['any_lci_mismatch'],
-                           "LCI mismatch", extras=stats)
-      asserts.assert_false(stat['any_lcr_mismatch'],
-                           "LCR mismatch", extras=stats)
-      asserts.assert_false(stat['invalid_num_attempted'],
-                           "Invalid (0) number of attempts", extras=stats)
-      asserts.assert_false(stat['invalid_num_successful'],
-                           "Invalid (0) number of successes", extras=stats)
-      asserts.assert_equal(stat['num_invalid_rssi'], 0, "Invalid RSSI",
-                          extras=stats)
-      asserts.assert_true(stat['num_failures'] <=
-              self.rtt_max_failure_rate_two_sided_rtt_percentage
-                          * stat['num_results'] / 100,
-              "Failure rate is too high", extras=stats)
-      asserts.assert_true(stat['num_range_out_of_margin'] <=
-              self.rtt_max_margin_exceeded_rate_two_sided_rtt_percentage
-                          * stat['num_success_results'] / 100,
-              "Results exceeding error margin rate is too high", extras=stats)
-    asserts.explicit_pass("RTT test done", extras=stats)
+    def __init__(self, controllers):
+        RttBaseTest.__init__(self, controllers)
 
-  #########################################################################
-  #
-  # LEGACY API test code
-  #
-  #########################################################################
+    #############################################################################
 
-  @test_tracker_info(uuid="18be9737-2f03-4e35-9a23-f722dea7b82d")
-  def test_legacy_rtt_80211mc_supporting_aps(self):
-    """Scan for APs and perform RTT only to those which support 802.11mc - using
-    the LEGACY API!"""
-    dut = self.android_devices[0]
-    rtt_supporting_aps = rutils.select_best_scan_results(
-      rutils.scan_with_rtt_support_constraint(dut, True, repeat=10),
-      select_count=2)
-    dut.log.debug("RTT Supporting APs=%s", rtt_supporting_aps)
+    @test_tracker_info(uuid="6705270f-924b-4bef-b50a-0f0a7eb9ce52")
+    def test_rtt_80211mc_supporting_aps(self):
+        """Scan for APs and perform RTT only to those which support 802.11mc"""
+        dut = self.android_devices[0]
+        rtt_supporting_aps = rutils.select_best_scan_results(
+            rutils.scan_with_rtt_support_constraint(dut, True, repeat=10),
+            select_count=2)
+        dut.log.debug("RTT Supporting APs=%s", rtt_supporting_aps)
+        events = rutils.run_ranging(dut, rtt_supporting_aps, self.NUM_ITER,
+                                    self.TIME_BETWEEN_ITERATIONS)
+        stats = rutils.analyze_results(events, self.rtt_reference_distance_mm,
+                                       self.rtt_reference_distance_margin_mm,
+                                       self.rtt_min_expected_rssi_dbm,
+                                       self.lci_reference, self.lcr_reference)
+        dut.log.debug("Stats=%s", stats)
 
-    rtt_configs = []
-    for ap in rtt_supporting_aps:
-      rtt_configs.append(self.rtt_config_from_scan_result(ap))
-    dut.log.debug("RTT configs=%s", rtt_configs)
+        for bssid, stat in stats.items():
+            asserts.assert_true(
+                stat['num_no_results'] == 0,
+                "Missing (timed-out) results",
+                extras=stats)
+            asserts.assert_false(
+                stat['any_lci_mismatch'], "LCI mismatch", extras=stats)
+            asserts.assert_false(
+                stat['any_lcr_mismatch'], "LCR mismatch", extras=stats)
+            asserts.assert_false(
+                stat['invalid_num_attempted'],
+                "Invalid (0) number of attempts",
+                extras=stats)
+            asserts.assert_false(
+                stat['invalid_num_successful'],
+                "Invalid (0) number of successes",
+                extras=stats)
+            asserts.assert_equal(
+                stat['num_invalid_rssi'], 0, "Invalid RSSI", extras=stats)
+            asserts.assert_true(
+                stat['num_failures'] <=
+                self.rtt_max_failure_rate_two_sided_rtt_percentage *
+                stat['num_results'] / 100,
+                "Failure rate is too high",
+                extras=stats)
+            asserts.assert_true(
+                stat['num_range_out_of_margin'] <=
+                self.rtt_max_margin_exceeded_rate_two_sided_rtt_percentage *
+                stat['num_success_results'] / 100,
+                "Results exceeding error margin rate is too high",
+                extras=stats)
+        asserts.explicit_pass("RTT test done", extras=stats)
 
-    results = []
-    num_missing = 0
-    num_failed_aborted = 0
-    for i in range(self.NUM_ITER):
-        idx = dut.droid.wifiRttStartRanging(rtt_configs)
-        event = None
-        try:
-          events = dut.ed.pop_events("WifiRttRanging%d" % idx, 30)
-          dut.log.debug("Event=%s", events)
-          for event in events:
-            if rconsts.EVENT_CB_RANGING_KEY_RESULTS in event["data"]:
-              results.append(
-                  event["data"][rconsts.EVENT_CB_RANGING_KEY_RESULTS])
+    @test_tracker_info(uuid="")
+    def test_rtt_in_and_after_softap_mode(self):
+        """Verify behavior when a SoftAP is enabled and then disabled on the
+        device:
+
+        - SAP Enabled: depending on device characteristics RTT may succeed or
+                       fail.
+        - SAP Disabled: RTT must now succeed.
+        """
+        supp_required_params = ("dbs_supported_models", )
+        self.unpack_userparams(supp_required_params)
+
+        dut = self.android_devices[0]
+
+        rtt_supporting_aps = rutils.select_best_scan_results(
+            rutils.scan_with_rtt_support_constraint(dut, True, repeat=10),
+            select_count=1)
+        dut.log.debug("RTT Supporting APs=%s", rtt_supporting_aps)
+
+        # phase 1 (pre-SAP)
+        events = rutils.run_ranging(dut, rtt_supporting_aps, self.NUM_ITER,
+                                    self.TIME_BETWEEN_ITERATIONS)
+        stats = rutils.analyze_results(events, self.rtt_reference_distance_mm,
+                                       self.rtt_reference_distance_margin_mm,
+                                       self.rtt_min_expected_rssi_dbm,
+                                       self.lci_reference, self.lcr_reference)
+        dut.log.debug("Stats Phase 1 (pre-SAP)=%s", stats)
+
+        for bssid, stat in stats.items():
+            asserts.assert_true(
+                stat['num_no_results'] == 0,
+                "Phase 1 (pre-SAP) missing (timed-out) results",
+                extras=stats)
+
+        # phase 2 (SAP)
+        wutils.start_wifi_tethering(
+            dut,
+            self.SOFT_AP_SSID,
+            self.SOFT_AP_PASSWORD,
+            band=WIFI_CONFIG_APBAND_5G,
+            hidden=False)
+        time.sleep(self.WAIT_FOR_CONFIG_CHANGES_SEC)
+
+        if dut.model not in self.dbs_supported_models:
+            rutils.wait_for_event(dut, rconsts.BROADCAST_WIFI_RTT_NOT_AVAILABLE)
+            asserts.assert_false(dut.droid.wifiIsRttAvailable(),
+                                 "RTT is available")
+
+        events = rutils.run_ranging(dut, rtt_supporting_aps, self.NUM_ITER,
+                                    self.TIME_BETWEEN_ITERATIONS)
+        stats = rutils.analyze_results(events, self.rtt_reference_distance_mm,
+                                       self.rtt_reference_distance_margin_mm,
+                                       self.rtt_min_expected_rssi_dbm,
+                                       self.lci_reference, self.lcr_reference)
+        dut.log.debug("Stats Phase 2 (SAP)=%s", stats)
+
+        for bssid, stat in stats.items():
+            if dut.model in self.dbs_supported_models:
+                asserts.assert_true(
+                    stat['num_no_results'] == 0,
+                    "Phase 2 (SAP) missing (timed-out) results",
+                    extras=stats)
             else:
-              self.log.info("RTT failed/aborted - %s", event)
-              results.append([])
-              num_failed_aborted = num_failed_aborted + 1
-        except queue.Empty:
-          self.log.debug("Waiting for RTT event timed out.")
-          results.append([])
-          num_missing = num_missing + 1
+                asserts.assert_true(
+                    stat['num_success_results'] == 0,
+                    "Phase 2 (SAP) valid results - but unexpected in SAP!?",
+                    extras=stats)
 
-    # basic error checking:
-    # 1. no missing
-    # 2. no full failed/aborted (i.e. operation not even tried)
-    # 3. overall (all BSSIDs) success rate > threshold
-    asserts.assert_equal(num_missing, 0,
-                         "Missing results (timeout waiting for event)",
-                         extras={"data":results})
-    asserts.assert_equal(num_failed_aborted, 0,
-                         "Failed or aborted operations (not tried)",
-                         extras={"data":results})
+        # phase 3 (post-SAP)
 
-    num_results = 0
-    num_errors = 0
-    for result_group in results:
-      num_results = num_results + len(result_group)
-      for result in result_group:
-        if result["status"] != 0:
-          num_errors = num_errors + 1
+        # enabling Wi-Fi first: on some devices this will also disable SAP
+        # (that's the scenario we're primarily testing). Additionally,
+        # explicitly disable SAP (which may be a NOP on some devices).
+        wutils.wifi_toggle_state(dut, True)
+        time.sleep(self.WAIT_FOR_CONFIG_CHANGES_SEC)
+        wutils.stop_wifi_tethering(dut)
 
-    extras = [results, {"num_results": num_results, "num_errors": num_errors}]
-    asserts.assert_true(
-      num_errors <= self.rtt_max_failure_rate_two_sided_rtt_percentage
-        * num_results / 100,
-      "Failure rate is too high", extras={"data":extras})
-    asserts.explicit_pass("RTT test done", extras={"data": extras})
+        if dut.model not in self.dbs_supported_models:
+            rutils.wait_for_event(dut, rconsts.BROADCAST_WIFI_RTT_AVAILABLE)
+            asserts.assert_true(dut.droid.wifiIsRttAvailable(),
+                                "RTT is not available")
 
-  def rtt_config_from_scan_result(self, scan_result):
-    """Creates an Rtt configuration based on the scan result of a network.
+        events = rutils.run_ranging(dut, rtt_supporting_aps, self.NUM_ITER,
+                                    self.TIME_BETWEEN_ITERATIONS)
+        stats = rutils.analyze_results(events, self.rtt_reference_distance_mm,
+                                       self.rtt_reference_distance_margin_mm,
+                                       self.rtt_min_expected_rssi_dbm,
+                                       self.lci_reference, self.lcr_reference)
+        dut.log.debug("Stats Phase 3 (post-SAP)=%s", stats)
+
+        for bssid, stat in stats.items():
+            asserts.assert_true(
+                stat['num_no_results'] == 0,
+                "Phase 3 (post-SAP) missing (timed-out) results",
+                extras=stats)
+
+    #########################################################################
+    #
+    # LEGACY API test code
+    #
+    #########################################################################
+
+    @test_tracker_info(uuid="18be9737-2f03-4e35-9a23-f722dea7b82d")
+    def test_legacy_rtt_80211mc_supporting_aps(self):
+        """Scan for APs and perform RTT only to those which support 802.11mc - using
+    the LEGACY API!"""
+        dut = self.android_devices[0]
+        rtt_supporting_aps = rutils.select_best_scan_results(
+            rutils.scan_with_rtt_support_constraint(dut, True, repeat=10),
+            select_count=2)
+        dut.log.debug("RTT Supporting APs=%s", rtt_supporting_aps)
+
+        rtt_configs = []
+        for ap in rtt_supporting_aps:
+            rtt_configs.append(self.rtt_config_from_scan_result(ap))
+        dut.log.debug("RTT configs=%s", rtt_configs)
+
+        results = []
+        num_missing = 0
+        num_failed_aborted = 0
+        for i in range(self.NUM_ITER):
+            idx = dut.droid.wifiRttStartRanging(rtt_configs)
+            event = None
+            try:
+                events = dut.ed.pop_events("WifiRttRanging%d" % idx, 30)
+                dut.log.debug("Event=%s", events)
+                for event in events:
+                    if rconsts.EVENT_CB_RANGING_KEY_RESULTS in event["data"]:
+                        results.append(event["data"][
+                            rconsts.EVENT_CB_RANGING_KEY_RESULTS])
+                    else:
+                        self.log.info("RTT failed/aborted - %s", event)
+                        results.append([])
+                        num_failed_aborted = num_failed_aborted + 1
+            except queue.Empty:
+                self.log.debug("Waiting for RTT event timed out.")
+                results.append([])
+                num_missing = num_missing + 1
+
+        # basic error checking:
+        # 1. no missing
+        # 2. no full failed/aborted (i.e. operation not even tried)
+        # 3. overall (all BSSIDs) success rate > threshold
+        asserts.assert_equal(
+            num_missing,
+            0,
+            "Missing results (timeout waiting for event)",
+            extras={"data": results})
+        asserts.assert_equal(
+            num_failed_aborted,
+            0,
+            "Failed or aborted operations (not tried)",
+            extras={"data": results})
+
+        num_results = 0
+        num_errors = 0
+        for result_group in results:
+            num_results = num_results + len(result_group)
+            for result in result_group:
+                if result["status"] != 0:
+                    num_errors = num_errors + 1
+
+        extras = [
+            results, {
+                "num_results": num_results,
+                "num_errors": num_errors
+            }
+        ]
+        asserts.assert_true(
+            num_errors <= self.rtt_max_failure_rate_two_sided_rtt_percentage *
+            num_results / 100,
+            "Failure rate is too high",
+            extras={"data": extras})
+        asserts.explicit_pass("RTT test done", extras={"data": extras})
+
+    def rtt_config_from_scan_result(self, scan_result):
+        """Creates an Rtt configuration based on the scan result of a network.
     """
-    WifiEnums = wutils.WifiEnums
-    ScanResult = WifiEnums.ScanResult
-    RttParam = WifiEnums.RttParam
-    RttBW = WifiEnums.RttBW
-    RttPreamble = WifiEnums.RttPreamble
-    RttType = WifiEnums.RttType
+        WifiEnums = wutils.WifiEnums
+        ScanResult = WifiEnums.ScanResult
+        RttParam = WifiEnums.RttParam
+        RttBW = WifiEnums.RttBW
+        RttPreamble = WifiEnums.RttPreamble
+        RttType = WifiEnums.RttType
 
-    scan_result_channel_width_to_rtt = {
-      ScanResult.CHANNEL_WIDTH_20MHZ: RttBW.BW_20_SUPPORT,
-      ScanResult.CHANNEL_WIDTH_40MHZ: RttBW.BW_40_SUPPORT,
-      ScanResult.CHANNEL_WIDTH_80MHZ: RttBW.BW_80_SUPPORT,
-      ScanResult.CHANNEL_WIDTH_160MHZ: RttBW.BW_160_SUPPORT,
-      ScanResult.CHANNEL_WIDTH_80MHZ_PLUS_MHZ: RttBW.BW_160_SUPPORT
-    }
-    p = {}
-    freq = scan_result[RttParam.frequency]
-    p[RttParam.frequency] = freq
-    p[RttParam.BSSID] = scan_result[WifiEnums.BSSID_KEY]
-    if freq > 5000:
-      p[RttParam.preamble] = RttPreamble.PREAMBLE_VHT
-    else:
-      p[RttParam.preamble] = RttPreamble.PREAMBLE_HT
-    cf0 = scan_result[RttParam.center_freq0]
-    if cf0 > 0:
-      p[RttParam.center_freq0] = cf0
-    cf1 = scan_result[RttParam.center_freq1]
-    if cf1 > 0:
-      p[RttParam.center_freq1] = cf1
-    cw = scan_result["channelWidth"]
-    p[RttParam.channel_width] = cw
-    p[RttParam.bandwidth] = scan_result_channel_width_to_rtt[cw]
-    if scan_result["is80211McRTTResponder"]:
-      p[RttParam.request_type] = RttType.TYPE_TWO_SIDED
-    else:
-      p[RttParam.request_type] = RttType.TYPE_ONE_SIDED
-    return p
+        scan_result_channel_width_to_rtt = {
+            ScanResult.CHANNEL_WIDTH_20MHZ: RttBW.BW_20_SUPPORT,
+            ScanResult.CHANNEL_WIDTH_40MHZ: RttBW.BW_40_SUPPORT,
+            ScanResult.CHANNEL_WIDTH_80MHZ: RttBW.BW_80_SUPPORT,
+            ScanResult.CHANNEL_WIDTH_160MHZ: RttBW.BW_160_SUPPORT,
+            ScanResult.CHANNEL_WIDTH_80MHZ_PLUS_MHZ: RttBW.BW_160_SUPPORT
+        }
+        p = {}
+        freq = scan_result[RttParam.frequency]
+        p[RttParam.frequency] = freq
+        p[RttParam.BSSID] = scan_result[WifiEnums.BSSID_KEY]
+        if freq > 5000:
+            p[RttParam.preamble] = RttPreamble.PREAMBLE_VHT
+        else:
+            p[RttParam.preamble] = RttPreamble.PREAMBLE_HT
+        cf0 = scan_result[RttParam.center_freq0]
+        if cf0 > 0:
+            p[RttParam.center_freq0] = cf0
+        cf1 = scan_result[RttParam.center_freq1]
+        if cf1 > 0:
+            p[RttParam.center_freq1] = cf1
+        cw = scan_result["channelWidth"]
+        p[RttParam.channel_width] = cw
+        p[RttParam.bandwidth] = scan_result_channel_width_to_rtt[cw]
+        if scan_result["is80211McRTTResponder"]:
+            p[RttParam.request_type] = RttType.TYPE_TWO_SIDED
+        else:
+            p[RttParam.request_type] = RttType.TYPE_ONE_SIDED
+        return p
diff --git a/acts/tests/google/wifi/rtt/functional/RangeAwareTest.py b/acts/tests/google/wifi/rtt/functional/RangeAwareTest.py
index d4b7d41..7eeecc2 100644
--- a/acts/tests/google/wifi/rtt/functional/RangeAwareTest.py
+++ b/acts/tests/google/wifi/rtt/functional/RangeAwareTest.py
@@ -28,38 +28,38 @@
 
 
 class RangeAwareTest(AwareBaseTest, RttBaseTest):
-  """Test class for RTT ranging to Wi-Fi Aware peers"""
-  SERVICE_NAME = "GoogleTestServiceXY"
+    """Test class for RTT ranging to Wi-Fi Aware peers"""
+    SERVICE_NAME = "GoogleTestServiceXY"
 
-  # Number of RTT iterations
-  NUM_ITER = 10
+    # Number of RTT iterations
+    NUM_ITER = 10
 
-  # Time gap (in seconds) between iterations
-  TIME_BETWEEN_ITERATIONS = 0
+    # Time gap (in seconds) between iterations
+    TIME_BETWEEN_ITERATIONS = 0
 
-  # Time gap (in seconds) when switching between Initiator and Responder
-  TIME_BETWEEN_ROLES = 4
+    # Time gap (in seconds) when switching between Initiator and Responder
+    TIME_BETWEEN_ROLES = 4
 
-  def __init__(self, controllers):
-    AwareBaseTest.__init__(self, controllers)
-    RttBaseTest.__init__(self, controllers)
+    def __init__(self, controllers):
+        AwareBaseTest.__init__(self, controllers)
+        RttBaseTest.__init__(self, controllers)
 
-  def setup_test(self):
-    """Manual setup here due to multiple inheritance: explicitly execute the
+    def setup_test(self):
+        """Manual setup here due to multiple inheritance: explicitly execute the
     setup method from both parents."""
-    AwareBaseTest.setup_test(self)
-    RttBaseTest.setup_test(self)
+        AwareBaseTest.setup_test(self)
+        RttBaseTest.setup_test(self)
 
-  def teardown_test(self):
-    """Manual teardown here due to multiple inheritance: explicitly execute the
+    def teardown_test(self):
+        """Manual teardown here due to multiple inheritance: explicitly execute the
     teardown method from both parents."""
-    AwareBaseTest.teardown_test(self)
-    RttBaseTest.teardown_test(self)
+        AwareBaseTest.teardown_test(self)
+        RttBaseTest.teardown_test(self)
 
-  #############################################################################
+    #############################################################################
 
-  def run_rtt_discovery(self, init_dut, resp_mac=None, resp_peer_id=None):
-    """Perform single RTT measurement, using Aware, from the Initiator DUT to
+    def run_rtt_discovery(self, init_dut, resp_mac=None, resp_peer_id=None):
+        """Perform single RTT measurement, using Aware, from the Initiator DUT to
     a Responder. The RTT Responder can be specified using its MAC address
     (obtained using out- of-band discovery) or its Peer ID (using Aware
     discovery).
@@ -69,28 +69,31 @@
       resp_mac: MAC address of the RTT Responder device
       resp_peer_id: Peer ID of the RTT Responder device
     """
-    asserts.assert_true(resp_mac is not None or resp_peer_id is not None,
-                        "One of the Responder specifications (MAC or Peer ID)"
-                        " must be provided!")
-    if resp_mac is not None:
-      id = init_dut.droid.wifiRttStartRangingToAwarePeerMac(resp_mac)
-    else:
-      id = init_dut.droid.wifiRttStartRangingToAwarePeerId(resp_peer_id)
-    try:
-      event = init_dut.ed.pop_event(rutils.decorate_event(
-          rconsts.EVENT_CB_RANGING_ON_RESULT, id), rutils.EVENT_TIMEOUT)
-      result = event["data"][rconsts.EVENT_CB_RANGING_KEY_RESULTS][0]
-      if resp_mac is not None:
-        rutils.validate_aware_mac_result(result, resp_mac, "DUT")
-      else:
-        rutils.validate_aware_peer_id_result(result, resp_peer_id, "DUT")
-      return result
-    except queue.Empty:
-      return None
+        asserts.assert_true(
+            resp_mac is not None or resp_peer_id is not None,
+            "One of the Responder specifications (MAC or Peer ID)"
+            " must be provided!")
+        if resp_mac is not None:
+            id = init_dut.droid.wifiRttStartRangingToAwarePeerMac(resp_mac)
+        else:
+            id = init_dut.droid.wifiRttStartRangingToAwarePeerId(resp_peer_id)
+        try:
+            event = init_dut.ed.pop_event(
+                rutils.decorate_event(rconsts.EVENT_CB_RANGING_ON_RESULT, id),
+                rutils.EVENT_TIMEOUT)
+            result = event["data"][rconsts.EVENT_CB_RANGING_KEY_RESULTS][0]
+            if resp_mac is not None:
+                rutils.validate_aware_mac_result(result, resp_mac, "DUT")
+            else:
+                rutils.validate_aware_peer_id_result(result, resp_peer_id,
+                                                     "DUT")
+            return result
+        except queue.Empty:
+            return None
 
-  def run_rtt_ib_discovery_set(self, do_both_directions, iter_count,
-      time_between_iterations, time_between_roles):
-    """Perform a set of RTT measurements, using in-band (Aware) discovery.
+    def run_rtt_ib_discovery_set(self, do_both_directions, iter_count,
+                                 time_between_iterations, time_between_roles):
+        """Perform a set of RTT measurements, using in-band (Aware) discovery.
 
     Args:
       do_both_directions: False - perform all measurements in one direction,
@@ -105,43 +108,46 @@
     failed measurement). If both directions are tested then returns a list of
     2 elements: one set for each direction.
     """
-    p_dut = self.android_devices[0]
-    s_dut = self.android_devices[1]
+        p_dut = self.android_devices[0]
+        s_dut = self.android_devices[1]
 
-    (p_id, s_id, p_disc_id, s_disc_id,
-     peer_id_on_sub, peer_id_on_pub) = autils.create_discovery_pair(
-        p_dut,
-        s_dut,
-        p_config=autils.add_ranging_to_pub(autils.create_discovery_config(
-            self.SERVICE_NAME, aconsts.PUBLISH_TYPE_UNSOLICITED), True),
-        s_config=autils.add_ranging_to_pub(autils.create_discovery_config(
-            self.SERVICE_NAME, aconsts.SUBSCRIBE_TYPE_PASSIVE), True),
-        device_startup_offset=self.device_startup_offset,
-        msg_id=self.get_next_msg_id())
+        (p_id, s_id, p_disc_id, s_disc_id, peer_id_on_sub,
+         peer_id_on_pub) = autils.create_discovery_pair(
+             p_dut,
+             s_dut,
+             p_config=autils.add_ranging_to_pub(
+                 autils.create_discovery_config(
+                     self.SERVICE_NAME, aconsts.PUBLISH_TYPE_UNSOLICITED),
+                 True),
+             s_config=autils.add_ranging_to_pub(
+                 autils.create_discovery_config(
+                     self.SERVICE_NAME, aconsts.SUBSCRIBE_TYPE_PASSIVE), True),
+             device_startup_offset=self.device_startup_offset,
+             msg_id=self.get_next_msg_id())
 
-    resultsPS = []
-    resultsSP = []
-    for i in range(iter_count):
-      if i != 0 and time_between_iterations != 0:
-        time.sleep(time_between_iterations)
+        resultsPS = []
+        resultsSP = []
+        for i in range(iter_count):
+            if i != 0 and time_between_iterations != 0:
+                time.sleep(time_between_iterations)
 
-      # perform RTT from pub -> sub
-      resultsPS.append(
-        self.run_rtt_discovery(p_dut, resp_peer_id=peer_id_on_pub))
+            # perform RTT from pub -> sub
+            resultsPS.append(
+                self.run_rtt_discovery(p_dut, resp_peer_id=peer_id_on_pub))
 
-      if do_both_directions:
-        if time_between_roles != 0:
-          time.sleep(time_between_roles)
+            if do_both_directions:
+                if time_between_roles != 0:
+                    time.sleep(time_between_roles)
 
-        # perform RTT from sub -> pub
-        resultsSP.append(
-          self.run_rtt_discovery(s_dut, resp_peer_id=peer_id_on_sub))
+                # perform RTT from sub -> pub
+                resultsSP.append(
+                    self.run_rtt_discovery(s_dut, resp_peer_id=peer_id_on_sub))
 
-    return resultsPS if not do_both_directions else [resultsPS, resultsSP]
+        return resultsPS if not do_both_directions else [resultsPS, resultsSP]
 
-  def run_rtt_oob_discovery_set(self, do_both_directions, iter_count,
-      time_between_iterations, time_between_roles):
-    """Perform a set of RTT measurements, using out-of-band discovery.
+    def run_rtt_oob_discovery_set(self, do_both_directions, iter_count,
+                                  time_between_iterations, time_between_roles):
+        """Perform a set of RTT measurements, using out-of-band discovery.
 
     Args:
       do_both_directions: False - perform all measurements in one direction,
@@ -157,253 +163,274 @@
     failed measurement). If both directions are tested then returns a list of
     2 elements: one set for each direction.
     """
-    dut0 = self.android_devices[0]
-    dut1 = self.android_devices[1]
+        dut0 = self.android_devices[0]
+        dut1 = self.android_devices[1]
 
-    id0, mac0 = autils.attach_with_identity(dut0)
-    id1, mac1 = autils.attach_with_identity(dut1)
+        id0, mac0 = autils.attach_with_identity(dut0)
+        id1, mac1 = autils.attach_with_identity(dut1)
 
-    # wait for for devices to synchronize with each other - there are no other
-    # mechanisms to make sure this happens for OOB discovery (except retrying
-    # to execute the data-path request)
-    time.sleep(autils.WAIT_FOR_CLUSTER)
+        # wait for devices to synchronize with each other - there are no other
+        # mechanisms to make sure this happens for OOB discovery (except retrying
+        # to execute the data-path request)
+        time.sleep(autils.WAIT_FOR_CLUSTER)
 
-    # start publisher(s) on the Responder(s) with ranging enabled
-    p_config = autils.add_ranging_to_pub(
-      autils.create_discovery_config(self.SERVICE_NAME,
-                                     aconsts.PUBLISH_TYPE_UNSOLICITED),
-      enable_ranging=True)
-    dut1.droid.wifiAwarePublish(id1, p_config)
-    autils.wait_for_event(dut1, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
-    if do_both_directions:
-      dut0.droid.wifiAwarePublish(id0, p_config)
-      autils.wait_for_event(dut0, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
+        # start publisher(s) on the Responder(s) with ranging enabled
+        p_config = autils.add_ranging_to_pub(
+            autils.create_discovery_config(self.SERVICE_NAME,
+                                           aconsts.PUBLISH_TYPE_UNSOLICITED),
+            enable_ranging=True)
+        dut1.droid.wifiAwarePublish(id1, p_config)
+        autils.wait_for_event(dut1, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
+        if do_both_directions:
+            dut0.droid.wifiAwarePublish(id0, p_config)
+            autils.wait_for_event(dut0, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
 
-    results01 = []
-    results10 = []
-    for i in range(iter_count):
-      if i != 0 and time_between_iterations != 0:
-        time.sleep(time_between_iterations)
+        results01 = []
+        results10 = []
+        for i in range(iter_count):
+            if i != 0 and time_between_iterations != 0:
+                time.sleep(time_between_iterations)
 
-      # perform RTT from dut0 -> dut1
-      results01.append(
-          self.run_rtt_discovery(dut0, resp_mac=mac1))
+            # perform RTT from dut0 -> dut1
+            results01.append(self.run_rtt_discovery(dut0, resp_mac=mac1))
 
-      if do_both_directions:
-        if time_between_roles != 0:
-          time.sleep(time_between_roles)
+            if do_both_directions:
+                if time_between_roles != 0:
+                    time.sleep(time_between_roles)
 
-        # perform RTT from dut1 -> dut0
-        results10.append(
-            self.run_rtt_discovery(dut1, resp_mac=mac0))
+                # perform RTT from dut1 -> dut0
+                results10.append(self.run_rtt_discovery(dut1, resp_mac=mac0))
 
-    return results01 if not do_both_directions else [results01, results10]
+        return results01 if not do_both_directions else [results01, results10]
 
-  def verify_results(self, results, results_reverse_direction=None):
-    """Verifies the results of the RTT experiment.
+    def verify_results(self, results, results_reverse_direction=None):
+        """Verifies the results of the RTT experiment.
 
     Args:
       results: List of RTT results.
       results_reverse_direction: List of RTT results executed in the
                                 reverse direction. Optional.
     """
-    stats = rutils.extract_stats(results, self.rtt_reference_distance_mm,
-                                 self.rtt_reference_distance_margin_mm,
-                                 self.rtt_min_expected_rssi_dbm)
-    stats_reverse_direction = None
-    if results_reverse_direction is not None:
-      stats_reverse_direction = rutils.extract_stats(results_reverse_direction,
-          self.rtt_reference_distance_mm, self.rtt_reference_distance_margin_mm,
-          self.rtt_min_expected_rssi_dbm)
-    self.log.debug("Stats: %s", stats)
-    if stats_reverse_direction is not None:
-      self.log.debug("Stats in reverse direction: %s", stats_reverse_direction)
+        stats = rutils.extract_stats(results, self.rtt_reference_distance_mm,
+                                     self.rtt_reference_distance_margin_mm,
+                                     self.rtt_min_expected_rssi_dbm)
+        stats_reverse_direction = None
+        if results_reverse_direction is not None:
+            stats_reverse_direction = rutils.extract_stats(
+                results_reverse_direction, self.rtt_reference_distance_mm,
+                self.rtt_reference_distance_margin_mm,
+                self.rtt_min_expected_rssi_dbm)
+        self.log.debug("Stats: %s", stats)
+        if stats_reverse_direction is not None:
+            self.log.debug("Stats in reverse direction: %s",
+                           stats_reverse_direction)
 
-    extras = stats if stats_reverse_direction is None else {
-      "forward": stats,
-      "reverse": stats_reverse_direction}
+        extras = stats if stats_reverse_direction is None else {
+            "forward": stats,
+            "reverse": stats_reverse_direction
+        }
 
-    asserts.assert_true(stats['num_no_results'] == 0,
-                        "Missing (timed-out) results", extras=extras)
-    asserts.assert_false(stats['any_lci_mismatch'],
-                         "LCI mismatch", extras=extras)
-    asserts.assert_false(stats['any_lcr_mismatch'],
-                         "LCR mismatch", extras=extras)
-    asserts.assert_false(stats['invalid_num_attempted'],
-                         "Invalid (0) number of attempts", extras=stats)
-    asserts.assert_false(stats['invalid_num_successful'],
-                         "Invalid (0) number of successes", extras=stats)
-    asserts.assert_equal(stats['num_invalid_rssi'], 0, "Invalid RSSI",
-                         extras=extras)
-    asserts.assert_true(
-        stats['num_failures'] <=
-          self.rtt_max_failure_rate_two_sided_rtt_percentage
-          * stats['num_results'] / 100,
-        "Failure rate is too high", extras=extras)
-    asserts.assert_true(
-        stats['num_range_out_of_margin']
-          <= self.rtt_max_margin_exceeded_rate_two_sided_rtt_percentage
-             * stats['num_success_results'] / 100,
-        "Results exceeding error margin rate is too high", extras=extras)
+        asserts.assert_true(
+            stats['num_no_results'] == 0,
+            "Missing (timed-out) results",
+            extras=extras)
+        asserts.assert_false(
+            stats['any_lci_mismatch'], "LCI mismatch", extras=extras)
+        asserts.assert_false(
+            stats['any_lcr_mismatch'], "LCR mismatch", extras=extras)
+        asserts.assert_false(
+            stats['invalid_num_attempted'],
+            "Invalid (0) number of attempts",
+            extras=stats)
+        asserts.assert_false(
+            stats['invalid_num_successful'],
+            "Invalid (0) number of successes",
+            extras=stats)
+        asserts.assert_equal(
+            stats['num_invalid_rssi'], 0, "Invalid RSSI", extras=extras)
+        asserts.assert_true(
+            stats['num_failures'] <=
+            self.rtt_max_failure_rate_two_sided_rtt_percentage *
+            stats['num_results'] / 100,
+            "Failure rate is too high",
+            extras=extras)
+        asserts.assert_true(
+            stats['num_range_out_of_margin'] <=
+            self.rtt_max_margin_exceeded_rate_two_sided_rtt_percentage *
+            stats['num_success_results'] / 100,
+            "Results exceeding error margin rate is too high",
+            extras=extras)
 
-    if stats_reverse_direction is not None:
-      asserts.assert_true(stats_reverse_direction['num_no_results'] == 0,
-                          "Missing (timed-out) results",
-                          extras=extras)
-      asserts.assert_false(stats['any_lci_mismatch'],
-                           "LCI mismatch", extras=extras)
-      asserts.assert_false(stats['any_lcr_mismatch'],
-                           "LCR mismatch", extras=extras)
-      asserts.assert_equal(stats['num_invalid_rssi'], 0, "Invalid RSSI",
-                           extras=extras)
-      asserts.assert_true(
-          stats_reverse_direction['num_failures']
-            <= self.rtt_max_failure_rate_two_sided_rtt_percentage
-                * stats['num_results'] / 100,
-          "Failure rate is too high", extras=extras)
-      asserts.assert_true(
-          stats_reverse_direction['num_range_out_of_margin']
-            <= self.rtt_max_margin_exceeded_rate_two_sided_rtt_percentage
-                * stats['num_success_results'] / 100,
-          "Results exceeding error margin rate is too high",
-          extras=extras)
+        if stats_reverse_direction is not None:
+            asserts.assert_true(
+                stats_reverse_direction['num_no_results'] == 0,
+                "Missing (timed-out) results",
+                extras=extras)
+            asserts.assert_false(
+                stats['any_lci_mismatch'], "LCI mismatch", extras=extras)
+            asserts.assert_false(
+                stats['any_lcr_mismatch'], "LCR mismatch", extras=extras)
+            asserts.assert_equal(
+                stats['num_invalid_rssi'], 0, "Invalid RSSI", extras=extras)
+            asserts.assert_true(
+                stats_reverse_direction['num_failures'] <=
+                self.rtt_max_failure_rate_two_sided_rtt_percentage *
+                stats['num_results'] / 100,
+                "Failure rate is too high",
+                extras=extras)
+            asserts.assert_true(
+                stats_reverse_direction['num_range_out_of_margin'] <=
+                self.rtt_max_margin_exceeded_rate_two_sided_rtt_percentage *
+                stats['num_success_results'] / 100,
+                "Results exceeding error margin rate is too high",
+                extras=extras)
 
-    asserts.explicit_pass("RTT Aware test done", extras=extras)
+        asserts.explicit_pass("RTT Aware test done", extras=extras)
 
-  #############################################################################
+    #############################################################################
 
-  @test_tracker_info(uuid="9e4e7ab4-2254-498c-9788-21e15ed9a370")
-  def test_rtt_oob_discovery_one_way(self):
-    """Perform RTT between 2 Wi-Fi Aware devices. Use out-of-band discovery
+    @test_tracker_info(uuid="9e4e7ab4-2254-498c-9788-21e15ed9a370")
+    def test_rtt_oob_discovery_one_way(self):
+        """Perform RTT between 2 Wi-Fi Aware devices. Use out-of-band discovery
     to communicate the MAC addresses to the peer. Test one-direction RTT only.
     """
-    rtt_results = self.run_rtt_oob_discovery_set(do_both_directions=False,
-          iter_count=self.NUM_ITER,
-          time_between_iterations=self.TIME_BETWEEN_ITERATIONS,
-          time_between_roles=self.TIME_BETWEEN_ROLES)
-    self.verify_results(rtt_results)
+        rtt_results = self.run_rtt_oob_discovery_set(
+            do_both_directions=False,
+            iter_count=self.NUM_ITER,
+            time_between_iterations=self.TIME_BETWEEN_ITERATIONS,
+            time_between_roles=self.TIME_BETWEEN_ROLES)
+        self.verify_results(rtt_results)
 
-  @test_tracker_info(uuid="22edba77-eeb2-43ee-875a-84437550ad84")
-  def test_rtt_oob_discovery_both_ways(self):
-    """Perform RTT between 2 Wi-Fi Aware devices. Use out-of-band discovery
+    @test_tracker_info(uuid="22edba77-eeb2-43ee-875a-84437550ad84")
+    def test_rtt_oob_discovery_both_ways(self):
+        """Perform RTT between 2 Wi-Fi Aware devices. Use out-of-band discovery
     to communicate the MAC addresses to the peer. Test RTT both-ways:
     switching rapidly between Initiator and Responder.
     """
-    rtt_results1, rtt_results2 = self.run_rtt_oob_discovery_set(
-        do_both_directions=True, iter_count=self.NUM_ITER,
-        time_between_iterations=self.TIME_BETWEEN_ITERATIONS,
-        time_between_roles=self.TIME_BETWEEN_ROLES)
-    self.verify_results(rtt_results1, rtt_results2)
+        rtt_results1, rtt_results2 = self.run_rtt_oob_discovery_set(
+            do_both_directions=True,
+            iter_count=self.NUM_ITER,
+            time_between_iterations=self.TIME_BETWEEN_ITERATIONS,
+            time_between_roles=self.TIME_BETWEEN_ROLES)
+        self.verify_results(rtt_results1, rtt_results2)
 
-  @test_tracker_info(uuid="18cef4be-95b4-4f7d-a140-5165874e7d1c")
-  def test_rtt_ib_discovery_one_way(self):
-    """Perform RTT between 2 Wi-Fi Aware devices. Use in-band (Aware) discovery
+    @test_tracker_info(uuid="18cef4be-95b4-4f7d-a140-5165874e7d1c")
+    def test_rtt_ib_discovery_one_way(self):
+        """Perform RTT between 2 Wi-Fi Aware devices. Use in-band (Aware) discovery
     to communicate the MAC addresses to the peer. Test one-direction RTT only.
     """
-    rtt_results = self.run_rtt_ib_discovery_set(do_both_directions=False,
-           iter_count=self.NUM_ITER,
-           time_between_iterations=self.TIME_BETWEEN_ITERATIONS,
-           time_between_roles=self.TIME_BETWEEN_ROLES)
-    self.verify_results(rtt_results)
+        rtt_results = self.run_rtt_ib_discovery_set(
+            do_both_directions=False,
+            iter_count=self.NUM_ITER,
+            time_between_iterations=self.TIME_BETWEEN_ITERATIONS,
+            time_between_roles=self.TIME_BETWEEN_ROLES)
+        self.verify_results(rtt_results)
 
-  @test_tracker_info(uuid="c67c8e70-c417-42d9-9bca-af3a89f1ddd9")
-  def test_rtt_ib_discovery_both_ways(self):
-    """Perform RTT between 2 Wi-Fi Aware devices. Use in-band (Aware) discovery
+    @test_tracker_info(uuid="c67c8e70-c417-42d9-9bca-af3a89f1ddd9")
+    def test_rtt_ib_discovery_both_ways(self):
+        """Perform RTT between 2 Wi-Fi Aware devices. Use in-band (Aware) discovery
     to communicate the MAC addresses to the peer. Test RTT both-ways:
     switching rapidly between Initiator and Responder.
     """
-    rtt_results1, rtt_results2 = self.run_rtt_ib_discovery_set(
-        do_both_directions=True, iter_count=self.NUM_ITER,
-        time_between_iterations=self.TIME_BETWEEN_ITERATIONS,
-        time_between_roles=self.TIME_BETWEEN_ROLES)
-    self.verify_results(rtt_results1, rtt_results2)
+        rtt_results1, rtt_results2 = self.run_rtt_ib_discovery_set(
+            do_both_directions=True,
+            iter_count=self.NUM_ITER,
+            time_between_iterations=self.TIME_BETWEEN_ITERATIONS,
+            time_between_roles=self.TIME_BETWEEN_ROLES)
+        self.verify_results(rtt_results1, rtt_results2)
 
-  @test_tracker_info(uuid="54f9693d-45e5-4979-adbb-1b875d217c0c")
-  def test_rtt_without_initiator_aware(self):
-    """Try to perform RTT operation when there is no local Aware session (on the
+    @test_tracker_info(uuid="54f9693d-45e5-4979-adbb-1b875d217c0c")
+    def test_rtt_without_initiator_aware(self):
+        """Try to perform RTT operation when there is no local Aware session (on the
     Initiator). The Responder is configured normally: Aware on and a Publisher
     with Ranging enable. Should FAIL."""
-    init_dut = self.android_devices[0]
-    resp_dut = self.android_devices[1]
+        init_dut = self.android_devices[0]
+        resp_dut = self.android_devices[1]
 
-    # Enable a Responder and start a Publisher
-    resp_id = resp_dut.droid.wifiAwareAttach(True)
-    autils.wait_for_event(resp_dut, aconsts.EVENT_CB_ON_ATTACHED)
-    resp_ident_event = autils.wait_for_event(resp_dut,
-                                         aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
-    resp_mac = resp_ident_event['data']['mac']
+        # Enable a Responder and start a Publisher
+        resp_id = resp_dut.droid.wifiAwareAttach(True)
+        autils.wait_for_event(resp_dut, aconsts.EVENT_CB_ON_ATTACHED)
+        resp_ident_event = autils.wait_for_event(
+            resp_dut, aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
+        resp_mac = resp_ident_event['data']['mac']
 
-    resp_config = autils.add_ranging_to_pub(
-        autils.create_discovery_config(self.SERVICE_NAME,
-                                       aconsts.PUBLISH_TYPE_UNSOLICITED),
-        enable_ranging=True)
-    resp_dut.droid.wifiAwarePublish(resp_id, resp_config)
-    autils.wait_for_event(resp_dut, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
+        resp_config = autils.add_ranging_to_pub(
+            autils.create_discovery_config(self.SERVICE_NAME,
+                                           aconsts.PUBLISH_TYPE_UNSOLICITED),
+            enable_ranging=True)
+        resp_dut.droid.wifiAwarePublish(resp_id, resp_config)
+        autils.wait_for_event(resp_dut, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
 
-    # Initiate an RTT to Responder (no Aware started on Initiator!)
-    results = []
-    num_no_responses = 0
-    num_successes = 0
-    for i in range(self.NUM_ITER):
-      result = self.run_rtt_discovery(init_dut, resp_mac=resp_mac)
-      self.log.debug("result: %s", result)
-      results.append(result)
-      if result is None:
-        num_no_responses = num_no_responses + 1
-      elif (result[rconsts.EVENT_CB_RANGING_KEY_STATUS]
-            == rconsts.EVENT_CB_RANGING_STATUS_SUCCESS):
-        num_successes = num_successes + 1
+        # Initiate an RTT to Responder (no Aware started on Initiator!)
+        results = []
+        num_no_responses = 0
+        num_successes = 0
+        for i in range(self.NUM_ITER):
+            result = self.run_rtt_discovery(init_dut, resp_mac=resp_mac)
+            self.log.debug("result: %s", result)
+            results.append(result)
+            if result is None:
+                num_no_responses = num_no_responses + 1
+            elif (result[rconsts.EVENT_CB_RANGING_KEY_STATUS] ==
+                  rconsts.EVENT_CB_RANGING_STATUS_SUCCESS):
+                num_successes = num_successes + 1
 
-    asserts.assert_equal(num_no_responses, 0, "No RTT response?",
-                         extras={"data":results})
-    asserts.assert_equal(num_successes, 0, "Aware RTT w/o Aware should FAIL!",
-                         extras={"data":results})
-    asserts.explicit_pass("RTT Aware test done", extras={"data":results})
+        asserts.assert_equal(
+            num_no_responses, 0, "No RTT response?", extras={"data": results})
+        asserts.assert_equal(
+            num_successes,
+            0,
+            "Aware RTT w/o Aware should FAIL!",
+            extras={"data": results})
+        asserts.explicit_pass("RTT Aware test done", extras={"data": results})
 
-  @test_tracker_info(uuid="87a69053-8261-4928-8ec1-c93aac7f3a8d")
-  def test_rtt_without_responder_aware(self):
-    """Try to perform RTT operation when there is no peer Aware session (on the
+    @test_tracker_info(uuid="87a69053-8261-4928-8ec1-c93aac7f3a8d")
+    def test_rtt_without_responder_aware(self):
+        """Try to perform RTT operation when there is no peer Aware session (on the
     Responder). Should FAIL."""
-    init_dut = self.android_devices[0]
-    resp_dut = self.android_devices[1]
+        init_dut = self.android_devices[0]
+        resp_dut = self.android_devices[1]
 
-    # Enable a Responder and start a Publisher
-    resp_id = resp_dut.droid.wifiAwareAttach(True)
-    autils.wait_for_event(resp_dut, aconsts.EVENT_CB_ON_ATTACHED)
-    resp_ident_event = autils.wait_for_event(resp_dut,
-                                             aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
-    resp_mac = resp_ident_event['data']['mac']
+        # Enable a Responder and start a Publisher
+        resp_id = resp_dut.droid.wifiAwareAttach(True)
+        autils.wait_for_event(resp_dut, aconsts.EVENT_CB_ON_ATTACHED)
+        resp_ident_event = autils.wait_for_event(
+            resp_dut, aconsts.EVENT_CB_ON_IDENTITY_CHANGED)
+        resp_mac = resp_ident_event['data']['mac']
 
-    resp_config = autils.add_ranging_to_pub(
-        autils.create_discovery_config(self.SERVICE_NAME,
-                                       aconsts.PUBLISH_TYPE_UNSOLICITED),
-        enable_ranging=True)
-    resp_dut.droid.wifiAwarePublish(resp_id, resp_config)
-    autils.wait_for_event(resp_dut, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
+        resp_config = autils.add_ranging_to_pub(
+            autils.create_discovery_config(self.SERVICE_NAME,
+                                           aconsts.PUBLISH_TYPE_UNSOLICITED),
+            enable_ranging=True)
+        resp_dut.droid.wifiAwarePublish(resp_id, resp_config)
+        autils.wait_for_event(resp_dut, aconsts.SESSION_CB_ON_PUBLISH_STARTED)
 
-    # Disable Responder
-    resp_dut.droid.wifiAwareDestroy(resp_id)
+        # Disable Responder
+        resp_dut.droid.wifiAwareDestroy(resp_id)
 
-    # Enable the Initiator
-    init_id = init_dut.droid.wifiAwareAttach()
-    autils.wait_for_event(init_dut, aconsts.EVENT_CB_ON_ATTACHED)
+        # Enable the Initiator
+        init_id = init_dut.droid.wifiAwareAttach()
+        autils.wait_for_event(init_dut, aconsts.EVENT_CB_ON_ATTACHED)
 
-    # Initiate an RTT to Responder (no Aware started on Initiator!)
-    results = []
-    num_no_responses = 0
-    num_successes = 0
-    for i in range(self.NUM_ITER):
-      result = self.run_rtt_discovery(init_dut, resp_mac=resp_mac)
-      self.log.debug("result: %s", result)
-      results.append(result)
-      if result is None:
-        num_no_responses = num_no_responses + 1
-      elif (result[rconsts.EVENT_CB_RANGING_KEY_STATUS]
-            == rconsts.EVENT_CB_RANGING_STATUS_SUCCESS):
-        num_successes = num_successes + 1
+        # Initiate an RTT to Responder (no Aware started on Initiator!)
+        results = []
+        num_no_responses = 0
+        num_successes = 0
+        for i in range(self.NUM_ITER):
+            result = self.run_rtt_discovery(init_dut, resp_mac=resp_mac)
+            self.log.debug("result: %s", result)
+            results.append(result)
+            if result is None:
+                num_no_responses = num_no_responses + 1
+            elif (result[rconsts.EVENT_CB_RANGING_KEY_STATUS] ==
+                  rconsts.EVENT_CB_RANGING_STATUS_SUCCESS):
+                num_successes = num_successes + 1
 
-    asserts.assert_equal(num_no_responses, 0, "No RTT response?",
-                         extras={"data":results})
-    asserts.assert_equal(num_successes, 0, "Aware RTT w/o Aware should FAIL!",
-                         extras={"data":results})
-    asserts.explicit_pass("RTT Aware test done", extras={"data":results})
+        asserts.assert_equal(
+            num_no_responses, 0, "No RTT response?", extras={"data": results})
+        asserts.assert_equal(
+            num_successes,
+            0,
+            "Aware RTT w/o Aware should FAIL!",
+            extras={"data": results})
+        asserts.explicit_pass("RTT Aware test done", extras={"data": results})
diff --git a/acts/tests/google/wifi/rtt/functional/RangeSoftApTest.py b/acts/tests/google/wifi/rtt/functional/RangeSoftApTest.py
index f0c4f4c..0478be8 100644
--- a/acts/tests/google/wifi/rtt/functional/RangeSoftApTest.py
+++ b/acts/tests/google/wifi/rtt/functional/RangeSoftApTest.py
@@ -24,72 +24,83 @@
 
 
 class RangeSoftApTest(RttBaseTest):
-  """Test class for RTT ranging to an Android Soft AP."""
+    """Test class for RTT ranging to an Android Soft AP."""
 
-  # Soft AP SSID
-  SOFT_AP_SSID = "RTT_TEST_SSID"
+    # Soft AP SSID
+    SOFT_AP_SSID = "RTT_TEST_SSID"
 
-  # Soft AP Password (irrelevant)
-  SOFT_AP_PASSWORD = "ABCDEFGH"
+    # Soft AP Password (irrelevant)
+    SOFT_AP_PASSWORD = "ABCDEFGH"
 
-  # Number of RTT iterations
-  NUM_ITER = 10
+    # Number of RTT iterations
+    NUM_ITER = 10
 
-  def __init__(self, controllers):
-    RttBaseTest.__init__(self, controllers)
+    def __init__(self, controllers):
+        RttBaseTest.__init__(self, controllers)
 
-  #########################################################################
+    #########################################################################
 
-  @test_tracker_info(uuid="578f0725-31e3-4e60-ad62-0212d93cf5b8")
-  def test_rtt_to_soft_ap(self):
-    """Set up a Soft AP on one device and try performing an RTT ranging to it
+    @test_tracker_info(uuid="578f0725-31e3-4e60-ad62-0212d93cf5b8")
+    def test_rtt_to_soft_ap(self):
+        """Set up a Soft AP on one device and try performing an RTT ranging to it
     from another device. The attempt must fail - RTT on Soft AP must be
     disabled."""
-    sap = self.android_devices[0]
-    sap.pretty_name = "SoftAP"
-    client = self.android_devices[1]
-    client.pretty_name = "Client"
+        sap = self.android_devices[0]
+        sap.pretty_name = "SoftAP"
+        client = self.android_devices[1]
+        client.pretty_name = "Client"
 
-    # start Soft AP
-    wutils.start_wifi_tethering(sap, self.SOFT_AP_SSID, self.SOFT_AP_PASSWORD,
-                                band=WIFI_CONFIG_APBAND_5G, hidden=False)
+        # start Soft AP
+        wutils.start_wifi_tethering(
+            sap,
+            self.SOFT_AP_SSID,
+            self.SOFT_AP_PASSWORD,
+            band=WIFI_CONFIG_APBAND_5G,
+            hidden=False)
 
-    try:
-      # start scanning on the client
-      wutils.start_wifi_connection_scan_and_ensure_network_found(client,
-                                                             self.SOFT_AP_SSID)
-      scans = client.droid.wifiGetScanResults()
-      scanned_softap = None
-      for scanned_ap in scans:
-        if scanned_ap[wutils.WifiEnums.SSID_KEY] == self.SOFT_AP_SSID:
-          scanned_softap = scanned_ap
-          break
+        try:
+            # start scanning on the client
+            wutils.start_wifi_connection_scan_and_ensure_network_found(
+                client, self.SOFT_AP_SSID)
+            scans = client.droid.wifiGetScanResults()
+            scanned_softap = None
+            for scanned_ap in scans:
+                if scanned_ap[wutils.WifiEnums.SSID_KEY] == self.SOFT_AP_SSID:
+                    scanned_softap = scanned_ap
+                    break
 
-      asserts.assert_false(scanned_softap == None, "Soft AP not found in scan!",
-                           extras=scans)
+            asserts.assert_false(
+                scanned_softap == None,
+                "Soft AP not found in scan!",
+                extras=scans)
 
-      # validate that Soft AP does not advertise 802.11mc support
-      asserts.assert_false(
-        rconsts.SCAN_RESULT_KEY_RTT_RESPONDER in scanned_softap and
-        scanned_softap[rconsts.SCAN_RESULT_KEY_RTT_RESPONDER],
-        "Soft AP advertises itself as supporting 802.11mc!",
-        extras=scanned_softap)
+            # validate that Soft AP does not advertise 802.11mc support
+            asserts.assert_false(
+                rconsts.SCAN_RESULT_KEY_RTT_RESPONDER in scanned_softap
+                and scanned_softap[rconsts.SCAN_RESULT_KEY_RTT_RESPONDER],
+                "Soft AP advertises itself as supporting 802.11mc!",
+                extras=scanned_softap)
 
-      # falsify the SoftAP's support for IEEE 802.11 so we try a 2-sided RTT
-      scanned_softap[rconsts.SCAN_RESULT_KEY_RTT_RESPONDER] = True # falsify
+            # falsify the SoftAP's support for IEEE 802.11mc so we try a 2-sided RTT
+            scanned_softap[
+                rconsts.SCAN_RESULT_KEY_RTT_RESPONDER] = True  # falsify
 
-      # actually try ranging to the Soft AP
-      events = rutils.run_ranging(client, [scanned_softap], self.NUM_ITER, 0)
-      stats = rutils.analyze_results(events, self.rtt_reference_distance_mm,
-                                     self.rtt_reference_distance_margin_mm,
-                                     self.rtt_min_expected_rssi_dbm,
-                                     self.lci_reference, self.lcr_reference)
+            # actually try ranging to the Soft AP
+            events = rutils.run_ranging(client, [scanned_softap],
+                                        self.NUM_ITER, 0)
+            stats = rutils.analyze_results(
+                events, self.rtt_reference_distance_mm,
+                self.rtt_reference_distance_margin_mm,
+                self.rtt_min_expected_rssi_dbm, self.lci_reference,
+                self.lcr_reference)
 
-      asserts.assert_equal(
-          stats[scanned_ap[wutils.WifiEnums.BSSID_KEY]]['num_failures'],
-          self.NUM_ITER, "Some RTT operations to Soft AP succeed!?",
-          extras=stats)
+            asserts.assert_equal(
+                stats[scanned_ap[wutils.WifiEnums.BSSID_KEY]]['num_failures'],
+                self.NUM_ITER,
+                "Some RTT operations to Soft AP succeed!?",
+                extras=stats)
 
-      asserts.explicit_pass("SoftAP + RTT validation done", extras=events)
-    finally:
-      wutils.stop_wifi_tethering(sap)
+            asserts.explicit_pass(
+                "SoftAP + RTT validation done", extras=events)
+        finally:
+            wutils.stop_wifi_tethering(sap)
diff --git a/acts/tests/google/wifi/rtt/functional/RttDisableTest.py b/acts/tests/google/wifi/rtt/functional/RttDisableTest.py
index 1816cd5..be6f36f 100644
--- a/acts/tests/google/wifi/rtt/functional/RttDisableTest.py
+++ b/acts/tests/google/wifi/rtt/functional/RttDisableTest.py
@@ -24,86 +24,91 @@
 
 
 class RttDisableTest(WifiBaseTest, RttBaseTest):
-  """Test class for RTT ranging enable/disable flows."""
+    """Test class for RTT ranging enable/disable flows."""
 
-  MODE_DISABLE_WIFI = 0
-  MODE_ENABLE_DOZE = 1
-  MODE_DISABLE_LOCATIONING = 2
+    MODE_DISABLE_WIFI = 0
+    MODE_ENABLE_DOZE = 1
+    MODE_DISABLE_LOCATIONING = 2
 
-  def __init__(self, controllers):
-    WifiBaseTest.__init__(self, controllers)
-    RttBaseTest.__init__(self, controllers)
-    if "AccessPoint" in self.user_params:
-      self.legacy_configure_ap_and_start()
+    def __init__(self, controllers):
+        WifiBaseTest.__init__(self, controllers)
+        RttBaseTest.__init__(self, controllers)
+        if "AccessPoint" in self.user_params:
+            self.legacy_configure_ap_and_start()
 
-  def run_disable_rtt(self, disable_mode):
-    """Validate the RTT disabled flows: whether by disabling Wi-Fi or entering
+    def run_disable_rtt(self, disable_mode):
+        """Validate the RTT disabled flows: whether by disabling Wi-Fi or entering
     doze mode.
 
     Args:
       disable_mode: The particular mechanism in which RTT is disabled. One of
                     the MODE_* constants.
     """
-    dut = self.android_devices[0]
+        dut = self.android_devices[0]
 
-    # validate start-up conditions
-    asserts.assert_true(dut.droid.wifiIsRttAvailable(), "RTT is not available")
+        # validate start-up conditions
+        asserts.assert_true(dut.droid.wifiIsRttAvailable(),
+                            "RTT is not available")
 
-    # scan to get some APs to be used later
-    all_aps = rutils.select_best_scan_results(rutils.scan_networks(dut),
-                                              select_count=1)
-    asserts.assert_true(len(all_aps) > 0, "Need at least one visible AP!")
+        # scan to get some APs to be used later
+        all_aps = rutils.select_best_scan_results(
+            rutils.scan_networks(dut), select_count=1)
+        asserts.assert_true(len(all_aps) > 0, "Need at least one visible AP!")
 
-    # disable RTT and validate broadcast & API
-    if disable_mode == self.MODE_DISABLE_WIFI:
-      # disabling Wi-Fi is not sufficient: since scan mode (and hence RTT) will
-      # remain enabled - we need to disable the Wi-Fi chip aka Airplane Mode
-      asserts.assert_true(utils.force_airplane_mode(dut, True),
-                          "Can not turn on airplane mode on: %s" % dut.serial)
-    elif disable_mode == self.MODE_ENABLE_DOZE:
-      asserts.assert_true(utils.enable_doze(dut), "Can't enable doze")
-    elif disable_mode == self.MODE_DISABLE_LOCATIONING:
-      utils.set_location_service(dut, False)
+        # disable RTT and validate broadcast & API
+        if disable_mode == self.MODE_DISABLE_WIFI:
+            # disabling Wi-Fi is not sufficient: since scan mode (and hence RTT) will
+            # remain enabled - we need to disable the Wi-Fi chip aka Airplane Mode
+            asserts.assert_true(
+                utils.force_airplane_mode(dut, True),
+                "Can not turn on airplane mode on: %s" % dut.serial)
+        elif disable_mode == self.MODE_ENABLE_DOZE:
+            asserts.assert_true(utils.enable_doze(dut), "Can't enable doze")
+        elif disable_mode == self.MODE_DISABLE_LOCATIONING:
+            utils.set_location_service(dut, False)
 
-    rutils.wait_for_event(dut, rconsts.BROADCAST_WIFI_RTT_NOT_AVAILABLE)
-    asserts.assert_false(dut.droid.wifiIsRttAvailable(), "RTT is available")
+        rutils.wait_for_event(dut, rconsts.BROADCAST_WIFI_RTT_NOT_AVAILABLE)
+        asserts.assert_false(dut.droid.wifiIsRttAvailable(),
+                             "RTT is available")
 
-    # request a range and validate error
-    id = dut.droid.wifiRttStartRangingToAccessPoints(all_aps[0:1])
-    event = rutils.wait_for_event(dut, rutils.decorate_event(
-        rconsts.EVENT_CB_RANGING_ON_FAIL, id))
-    asserts.assert_equal(event["data"][rconsts.EVENT_CB_RANGING_KEY_STATUS],
-                         rconsts.RANGING_FAIL_CODE_RTT_NOT_AVAILABLE,
-                         "Invalid error code")
+        # request a range and validate error
+        id = dut.droid.wifiRttStartRangingToAccessPoints(all_aps[0:1])
+        event = rutils.wait_for_event(
+            dut, rutils.decorate_event(rconsts.EVENT_CB_RANGING_ON_FAIL, id))
+        asserts.assert_equal(
+            event["data"][rconsts.EVENT_CB_RANGING_KEY_STATUS],
+            rconsts.RANGING_FAIL_CODE_RTT_NOT_AVAILABLE, "Invalid error code")
 
-    # enable RTT and validate broadcast & API
-    if disable_mode == self.MODE_DISABLE_WIFI:
-      asserts.assert_true(utils.force_airplane_mode(dut, False),
-                          "Can not turn off airplane mode on: %s" % dut.serial)
-    elif disable_mode == self.MODE_ENABLE_DOZE:
-      asserts.assert_true(utils.disable_doze(dut), "Can't disable doze")
-    elif disable_mode == self.MODE_DISABLE_LOCATIONING:
-      utils.set_location_service(dut, True)
+        # enable RTT and validate broadcast & API
+        if disable_mode == self.MODE_DISABLE_WIFI:
+            asserts.assert_true(
+                utils.force_airplane_mode(dut, False),
+                "Can not turn off airplane mode on: %s" % dut.serial)
+        elif disable_mode == self.MODE_ENABLE_DOZE:
+            asserts.assert_true(utils.disable_doze(dut), "Can't disable doze")
+        elif disable_mode == self.MODE_DISABLE_LOCATIONING:
+            utils.set_location_service(dut, True)
 
-    rutils.wait_for_event(dut, rconsts.BROADCAST_WIFI_RTT_AVAILABLE)
-    asserts.assert_true(dut.droid.wifiIsRttAvailable(), "RTT is not available")
+        rutils.wait_for_event(dut, rconsts.BROADCAST_WIFI_RTT_AVAILABLE)
+        asserts.assert_true(dut.droid.wifiIsRttAvailable(),
+                            "RTT is not available")
 
-  ############################################################################
+    ############################################################################
 
-  @test_tracker_info(uuid="498c49ab-a188-4612-998d-c47b35ff285e")
-  def test_disable_wifi(self):
-    """Validate that getting expected broadcast when Wi-Fi is disabled and that
+    @test_tracker_info(uuid="498c49ab-a188-4612-998d-c47b35ff285e")
+    def test_disable_wifi(self):
+        """Validate that getting expected broadcast when Wi-Fi is disabled and that
     any range requests are rejected."""
-    self.run_disable_rtt(self.MODE_DISABLE_WIFI)
+        self.run_disable_rtt(self.MODE_DISABLE_WIFI)
 
-  @test_tracker_info(uuid="f71f731f-4aaf-402b-8595-db94b625b544")
-  def test_enable_doze(self):
-    """Validate that getting expected broadcast when RTT is disabled due to doze
+    @test_tracker_info(uuid="f71f731f-4aaf-402b-8595-db94b625b544")
+    def test_enable_doze(self):
+        """Validate that getting expected broadcast when RTT is disabled due to doze
     mode and that any range requests are rejected."""
-    self.run_disable_rtt(self.MODE_ENABLE_DOZE)
+        self.run_disable_rtt(self.MODE_ENABLE_DOZE)
 
-  @test_tracker_info(uuid="6a1c83a8-9eaf-49db-b547-5131cba0eafe")
-  def test_disable_location(self):
-    """Validate that getting expected broadcast when locationing is disabled and
+    @test_tracker_info(uuid="6a1c83a8-9eaf-49db-b547-5131cba0eafe")
+    def test_disable_location(self):
+        """Validate that getting expected broadcast when locationing is disabled and
     that any range requests are rejected."""
-    self.run_disable_rtt(self.MODE_DISABLE_LOCATIONING)
+        self.run_disable_rtt(self.MODE_DISABLE_LOCATIONING)
diff --git a/acts/tests/google/wifi/rtt/functional/RttRequestManagementTest.py b/acts/tests/google/wifi/rtt/functional/RttRequestManagementTest.py
index 82c1058..48fdf5f 100644
--- a/acts/tests/google/wifi/rtt/functional/RttRequestManagementTest.py
+++ b/acts/tests/google/wifi/rtt/functional/RttRequestManagementTest.py
@@ -25,18 +25,18 @@
 
 
 class RttRequestManagementTest(RttBaseTest):
-  """Test class for RTT request management flows."""
+    """Test class for RTT request management flows."""
 
-  SPAMMING_LIMIT = 20
+    SPAMMING_LIMIT = 20
 
-  def __init__(self, controllers):
-    RttBaseTest.__init__(self, controllers)
+    def __init__(self, controllers):
+        RttBaseTest.__init__(self, controllers)
 
-  #############################################################################
+    #############################################################################
 
-  @test_tracker_info(uuid="29ff4a02-2952-47df-bf56-64f30c963093")
-  def test_cancel_ranging(self):
-    """Request a 'large' number of range operations with various UIDs (using the
+    @test_tracker_info(uuid="29ff4a02-2952-47df-bf56-64f30c963093")
+    def test_cancel_ranging(self):
+        """Request a 'large' number of range operations with various UIDs (using the
     work-source API), then cancel some of them.
 
     We can't guarantee a reaction time - it is possible that a cancelled test
@@ -51,90 +51,99 @@
 
     Expect to receive only 51 results.
     """
-    dut = self.android_devices[0]
-    max_peers = dut.droid.wifiRttMaxPeersInRequest()
+        dut = self.android_devices[0]
+        max_peers = dut.droid.wifiRttMaxPeersInRequest()
 
-    all_uids = [1000, 20, 30] # 1000 = System Server (makes requests foreground)
-    some_uids = [20, 30]
+        all_uids = [1000, 20,
+                    30]  # 1000 = System Server (makes requests foreground)
+        some_uids = [20, 30]
 
-    aps = rutils.select_best_scan_results(
-      rutils.scan_with_rtt_support_constraint(dut, True, repeat=10),
-      select_count=1)
-    dut.log.info("RTT Supporting APs=%s", aps)
+        aps = rutils.select_best_scan_results(
+            rutils.scan_with_rtt_support_constraint(dut, True, repeat=10),
+            select_count=1)
+        dut.log.info("RTT Supporting APs=%s", aps)
 
-    asserts.assert_true(
-        len(aps) > 0,
-        "Need at least one AP which supports 802.11mc!")
-    if len(aps) > max_peers:
-      aps = aps[0:max_peers]
+        asserts.assert_true(
+            len(aps) > 0, "Need at least one AP which supports 802.11mc!")
+        if len(aps) > max_peers:
+            aps = aps[0:max_peers]
 
-    group1_ids = []
-    group2_ids = []
-    group3_ids = []
+        group1_ids = []
+        group2_ids = []
+        group3_ids = []
 
-    # step 1: request <spam_limit> ranging operations on [uid1, uid2, uid3]
-    for i in range(self.SPAMMING_LIMIT):
-      group1_ids.append(
-        dut.droid.wifiRttStartRangingToAccessPoints(aps, all_uids))
+        # step 1: request <spam_limit> ranging operations on [uid1, uid2, uid3]
+        for i in range(self.SPAMMING_LIMIT):
+            group1_ids.append(
+                dut.droid.wifiRttStartRangingToAccessPoints(aps, all_uids))
 
-    # step 2: request 2 ranging operations on [uid2, uid3]
-    for i in range(2):
-      group2_ids.append(
-        dut.droid.wifiRttStartRangingToAccessPoints(aps, some_uids))
+        # step 2: request 2 ranging operations on [uid2, uid3]
+        for i in range(2):
+            group2_ids.append(
+                dut.droid.wifiRttStartRangingToAccessPoints(aps, some_uids))
 
-    # step 3: request 1 ranging operation on [uid1, uid2, uid3]
-    for i in range(1):
-      group3_ids.append(
-          dut.droid.wifiRttStartRangingToAccessPoints(aps, all_uids))
+        # step 3: request 1 ranging operation on [uid1, uid2, uid3]
+        for i in range(1):
+            group3_ids.append(
+                dut.droid.wifiRttStartRangingToAccessPoints(aps, all_uids))
 
-    # step 4: cancel ranging requests on [uid2, uid3]
-    dut.droid.wifiRttCancelRanging(some_uids)
+        # step 4: cancel ranging requests on [uid2, uid3]
+        dut.droid.wifiRttCancelRanging(some_uids)
 
-    # collect results
-    for i in range(len(group1_ids)):
-      rutils.wait_for_event(dut, rutils.decorate_event(
-        rconsts.EVENT_CB_RANGING_ON_RESULT, group1_ids[i]))
-    time.sleep(rutils.EVENT_TIMEOUT) # optimize time-outs below to single one
-    for i in range(len(group2_ids)):
-      rutils.fail_on_event(dut, rutils.decorate_event(
-          rconsts.EVENT_CB_RANGING_ON_RESULT, group2_ids[i]), 0)
-    for i in range(len(group3_ids)):
-      rutils.wait_for_event(dut, rutils.decorate_event(
-          rconsts.EVENT_CB_RANGING_ON_RESULT, group3_ids[i]))
+        # collect results
+        for i in range(len(group1_ids)):
+            rutils.wait_for_event(
+                dut,
+                rutils.decorate_event(rconsts.EVENT_CB_RANGING_ON_RESULT,
+                                      group1_ids[i]))
+        time.sleep(
+            rutils.EVENT_TIMEOUT)  # optimize time-outs below to single one
+        for i in range(len(group2_ids)):
+            rutils.fail_on_event(
+                dut,
+                rutils.decorate_event(rconsts.EVENT_CB_RANGING_ON_RESULT,
+                                      group2_ids[i]), 0)
+        for i in range(len(group3_ids)):
+            rutils.wait_for_event(
+                dut,
+                rutils.decorate_event(rconsts.EVENT_CB_RANGING_ON_RESULT,
+                                      group3_ids[i]))
 
-  @test_tracker_info(uuid="48297480-c026-4780-8c13-476e7bea440c")
-  def test_throttling(self):
-    """Request sequential range operations using a bogus UID (which will
+    @test_tracker_info(uuid="48297480-c026-4780-8c13-476e7bea440c")
+    def test_throttling(self):
+        """Request sequential range operations using a bogus UID (which will
     translate as a throttled process) and similarly using the ACTS/sl4a as
     the source (a foreground/unthrottled process)."""
-    dut = self.android_devices[0]
-    max_peers = dut.droid.wifiRttMaxPeersInRequest()
+        dut = self.android_devices[0]
+        max_peers = dut.droid.wifiRttMaxPeersInRequest()
 
-    # Need to use a random number since the system keeps states and so the
-    # background uid will be throttled on the next run of this script
-    fake_uid = [random.randint(10, 9999)]
+        # Need to use a random number since the system keeps states and so the
+        # background uid will be throttled on the next run of this script
+        fake_uid = [random.randint(10, 9999)]
 
-    aps = rutils.select_best_scan_results(
-      rutils.scan_with_rtt_support_constraint(dut, True, repeat=10),
-      select_count=1)
-    dut.log.info("RTT Supporting APs=%s", aps)
+        aps = rutils.select_best_scan_results(
+            rutils.scan_with_rtt_support_constraint(dut, True, repeat=10),
+            select_count=1)
+        dut.log.info("RTT Supporting APs=%s", aps)
 
-    asserts.assert_true(
-        len(aps) > 0,
-        "Need at least one AP which supports 802.11mc!")
-    if len(aps) > max_peers:
-      aps = aps[0:max_peers]
+        asserts.assert_true(
+            len(aps) > 0, "Need at least one AP which supports 802.11mc!")
+        if len(aps) > max_peers:
+            aps = aps[0:max_peers]
 
-    id1 = dut.droid.wifiRttStartRangingToAccessPoints(aps) # as ACTS/sl4a
-    id2 = dut.droid.wifiRttStartRangingToAccessPoints(aps, fake_uid)
-    id3 = dut.droid.wifiRttStartRangingToAccessPoints(aps, fake_uid)
-    id4 = dut.droid.wifiRttStartRangingToAccessPoints(aps) # as ACTS/sl4a
+        id1 = dut.droid.wifiRttStartRangingToAccessPoints(aps)  # as ACTS/sl4a
+        id2 = dut.droid.wifiRttStartRangingToAccessPoints(aps, fake_uid)
+        id3 = dut.droid.wifiRttStartRangingToAccessPoints(aps, fake_uid)
+        id4 = dut.droid.wifiRttStartRangingToAccessPoints(aps)  # as ACTS/sl4a
 
-    rutils.wait_for_event(dut, rutils.decorate_event(
-      rconsts.EVENT_CB_RANGING_ON_RESULT, id1))
-    rutils.wait_for_event(dut, rutils.decorate_event(
-        rconsts.EVENT_CB_RANGING_ON_RESULT, id2))
-    rutils.wait_for_event(dut, rutils.decorate_event(
-        rconsts.EVENT_CB_RANGING_ON_FAIL, id3))
-    rutils.wait_for_event(dut, rutils.decorate_event(
-        rconsts.EVENT_CB_RANGING_ON_RESULT, id4))
+        rutils.wait_for_event(
+            dut, rutils.decorate_event(rconsts.EVENT_CB_RANGING_ON_RESULT,
+                                       id1))
+        rutils.wait_for_event(
+            dut, rutils.decorate_event(rconsts.EVENT_CB_RANGING_ON_RESULT,
+                                       id2))
+        rutils.wait_for_event(
+            dut, rutils.decorate_event(rconsts.EVENT_CB_RANGING_ON_FAIL, id3))
+        rutils.wait_for_event(
+            dut, rutils.decorate_event(rconsts.EVENT_CB_RANGING_ON_RESULT,
+                                       id4))
diff --git a/acts/tests/google/wifi/rtt/stress/StressRangeApTest.py b/acts/tests/google/wifi/rtt/stress/StressRangeApTest.py
index 497c125..9f64982 100644
--- a/acts/tests/google/wifi/rtt/stress/StressRangeApTest.py
+++ b/acts/tests/google/wifi/rtt/stress/StressRangeApTest.py
@@ -21,59 +21,66 @@
 
 
 class StressRangeApTest(RttBaseTest):
-  """Test class for stress testing of RTT ranging to Access Points"""
+    """Test class for stress testing of RTT ranging to Access Points"""
 
-  def __init__(self, controllers):
-    BaseTestClass.__init__(self, controllers)
+    def __init__(self, controllers):
+        BaseTestClass.__init__(self, controllers)
 
-  #############################################################################
+    #############################################################################
 
-  def test_rtt_supporting_ap_only(self):
-    """Scan for APs and perform RTT only to those which support 802.11mc.
+    def test_rtt_supporting_ap_only(self):
+        """Scan for APs and perform RTT only to those which support 802.11mc.
 
     Stress test: repeat ranging to the same AP. Verify rate of success and
     stability of results.
     """
-    dut = self.android_devices[0]
-    rtt_supporting_aps = rutils.scan_with_rtt_support_constraint(dut, True,
-                                                                 repeat=10)
-    dut.log.debug("RTT Supporting APs=%s", rtt_supporting_aps)
+        dut = self.android_devices[0]
+        rtt_supporting_aps = rutils.scan_with_rtt_support_constraint(
+            dut, True, repeat=10)
+        dut.log.debug("RTT Supporting APs=%s", rtt_supporting_aps)
 
-    num_iter = self.stress_test_min_iteration_count
+        num_iter = self.stress_test_min_iteration_count
 
-    max_peers = dut.droid.wifiRttMaxPeersInRequest()
-    asserts.assert_true(
-        len(rtt_supporting_aps) > 0,
-        "Need at least one AP which supports 802.11mc!")
-    if len(rtt_supporting_aps) > max_peers:
-      rtt_supporting_aps = rtt_supporting_aps[0:max_peers]
+        max_peers = dut.droid.wifiRttMaxPeersInRequest()
+        asserts.assert_true(
+            len(rtt_supporting_aps) > 0,
+            "Need at least one AP which supports 802.11mc!")
+        if len(rtt_supporting_aps) > max_peers:
+            rtt_supporting_aps = rtt_supporting_aps[0:max_peers]
 
-    events = rutils.run_ranging(dut, rtt_supporting_aps, num_iter, 0,
-                                self.stress_test_target_run_time_sec)
-    stats = rutils.analyze_results(events, self.rtt_reference_distance_mm,
-                                   self.rtt_reference_distance_margin_mm,
-                                   self.rtt_min_expected_rssi_dbm,
-                                   self.lci_reference, self.lcr_reference,
-                                   summary_only=True)
-    dut.log.debug("Stats=%s", stats)
+        events = rutils.run_ranging(dut, rtt_supporting_aps, num_iter, 0,
+                                    self.stress_test_target_run_time_sec)
+        stats = rutils.analyze_results(
+            events,
+            self.rtt_reference_distance_mm,
+            self.rtt_reference_distance_margin_mm,
+            self.rtt_min_expected_rssi_dbm,
+            self.lci_reference,
+            self.lcr_reference,
+            summary_only=True)
+        dut.log.debug("Stats=%s", stats)
 
-    for bssid, stat in stats.items():
-      asserts.assert_true(stat['num_no_results'] == 0,
-                          "Missing (timed-out) results", extras=stats)
-      asserts.assert_false(stat['any_lci_mismatch'],
-                           "LCI mismatch", extras=stats)
-      asserts.assert_false(stat['any_lcr_mismatch'],
-                           "LCR mismatch", extras=stats)
-      asserts.assert_equal(stat['num_invalid_rssi'], 0, "Invalid RSSI",
-                          extras=stats)
-      asserts.assert_true(stat['num_failures'] <=
-                          self.rtt_max_failure_rate_two_sided_rtt_percentage
-                          * stat['num_results'] / 100,
-                          "Failure rate is too high", extras=stats)
-      asserts.assert_true(stat['num_range_out_of_margin'] <=
-                    self.rtt_max_margin_exceeded_rate_two_sided_rtt_percentage
-                    * stat['num_success_results'] / 100,
-                    "Results exceeding error margin rate is too high",
-                    extras=stats)
-    asserts.explicit_pass("RTT test done", extras=stats)
-
+        for bssid, stat in stats.items():
+            asserts.assert_true(
+                stat['num_no_results'] == 0,
+                "Missing (timed-out) results",
+                extras=stats)
+            asserts.assert_false(
+                stat['any_lci_mismatch'], "LCI mismatch", extras=stats)
+            asserts.assert_false(
+                stat['any_lcr_mismatch'], "LCR mismatch", extras=stats)
+            asserts.assert_equal(
+                stat['num_invalid_rssi'], 0, "Invalid RSSI", extras=stats)
+            asserts.assert_true(
+                stat['num_failures'] <=
+                self.rtt_max_failure_rate_two_sided_rtt_percentage *
+                stat['num_results'] / 100,
+                "Failure rate is too high",
+                extras=stats)
+            asserts.assert_true(
+                stat['num_range_out_of_margin'] <=
+                self.rtt_max_margin_exceeded_rate_two_sided_rtt_percentage *
+                stat['num_success_results'] / 100,
+                "Results exceeding error margin rate is too high",
+                extras=stats)
+        asserts.explicit_pass("RTT test done", extras=stats)
diff --git a/acts/tests/google/wifi/rtt/stress/StressRangeAwareTest.py b/acts/tests/google/wifi/rtt/stress/StressRangeAwareTest.py
index 3073898..e5a4099 100644
--- a/acts/tests/google/wifi/rtt/stress/StressRangeAwareTest.py
+++ b/acts/tests/google/wifi/rtt/stress/StressRangeAwareTest.py
@@ -27,29 +27,29 @@
 
 
 class StressRangeAwareTest(AwareBaseTest, RttBaseTest):
-  """Test class for stress testing of RTT ranging to Wi-Fi Aware peers."""
-  SERVICE_NAME = "GoogleTestServiceXY"
+    """Test class for stress testing of RTT ranging to Wi-Fi Aware peers."""
+    SERVICE_NAME = "GoogleTestServiceXY"
 
-  def __init__(self, controllers):
-    AwareBaseTest.__init__(self, controllers)
-    RttBaseTest.__init__(self, controllers)
+    def __init__(self, controllers):
+        AwareBaseTest.__init__(self, controllers)
+        RttBaseTest.__init__(self, controllers)
 
-  def setup_test(self):
-    """Manual setup here due to multiple inheritance: explicitly execute the
+    def setup_test(self):
+        """Manual setup here due to multiple inheritance: explicitly execute the
     setup method from both parents."""
-    AwareBaseTest.setup_test(self)
-    RttBaseTest.setup_test(self)
+        AwareBaseTest.setup_test(self)
+        RttBaseTest.setup_test(self)
 
-  def teardown_test(self):
-    """Manual teardown here due to multiple inheritance: explicitly execute the
+    def teardown_test(self):
+        """Manual teardown here due to multiple inheritance: explicitly execute the
     teardown method from both parents."""
-    AwareBaseTest.teardown_test(self)
-    RttBaseTest.teardown_test(self)
+        AwareBaseTest.teardown_test(self)
+        RttBaseTest.teardown_test(self)
 
-  #############################################################################
+    #############################################################################
 
-  def run_rtt_discovery(self, init_dut, resp_mac=None, resp_peer_id=None):
-    """Perform single RTT measurement, using Aware, from the Initiator DUT to
+    def run_rtt_discovery(self, init_dut, resp_mac=None, resp_peer_id=None):
+        """Perform single RTT measurement, using Aware, from the Initiator DUT to
     a Responder. The RTT Responder can be specified using its MAC address
     (obtained using out- of-band discovery) or its Peer ID (using Aware
     discovery).
@@ -59,79 +59,93 @@
       resp_mac: MAC address of the RTT Responder device
       resp_peer_id: Peer ID of the RTT Responder device
     """
-    asserts.assert_true(resp_mac is not None or resp_peer_id is not None,
-                        "One of the Responder specifications (MAC or Peer ID)"
-                        " must be provided!")
-    if resp_mac is not None:
-      id = init_dut.droid.wifiRttStartRangingToAwarePeerMac(resp_mac)
-    else:
-      id = init_dut.droid.wifiRttStartRangingToAwarePeerId(resp_peer_id)
-    try:
-      event = init_dut.ed.pop_event(rutils.decorate_event(
-          rconsts.EVENT_CB_RANGING_ON_RESULT, id), rutils.EVENT_TIMEOUT)
-      result = event["data"][rconsts.EVENT_CB_RANGING_KEY_RESULTS][0]
-      if resp_mac is not None:
-        rutils.validate_aware_mac_result(result, resp_mac, "DUT")
-      else:
-        rutils.validate_aware_peer_id_result(result, resp_peer_id, "DUT")
-      return result
-    except queue.Empty:
-      return None
+        asserts.assert_true(
+            resp_mac is not None or resp_peer_id is not None,
+            "One of the Responder specifications (MAC or Peer ID)"
+            " must be provided!")
+        if resp_mac is not None:
+            id = init_dut.droid.wifiRttStartRangingToAwarePeerMac(resp_mac)
+        else:
+            id = init_dut.droid.wifiRttStartRangingToAwarePeerId(resp_peer_id)
+        try:
+            event = init_dut.ed.pop_event(
+                rutils.decorate_event(rconsts.EVENT_CB_RANGING_ON_RESULT, id),
+                rutils.EVENT_TIMEOUT)
+            result = event["data"][rconsts.EVENT_CB_RANGING_KEY_RESULTS][0]
+            if resp_mac is not None:
+                rutils.validate_aware_mac_result(result, resp_mac, "DUT")
+            else:
+                rutils.validate_aware_peer_id_result(result, resp_peer_id,
+                                                     "DUT")
+            return result
+        except queue.Empty:
+            return None
 
-  def test_stress_rtt_ib_discovery_set(self):
-    """Perform a set of RTT measurements, using in-band (Aware) discovery, and
+    def test_stress_rtt_ib_discovery_set(self):
+        """Perform a set of RTT measurements, using in-band (Aware) discovery, and
     switching Initiator and Responder roles repeatedly.
 
     Stress test: repeat ranging operations. Verify rate of success and
     stability of results.
     """
-    p_dut = self.android_devices[0]
-    s_dut = self.android_devices[1]
+        p_dut = self.android_devices[0]
+        s_dut = self.android_devices[1]
 
-    (p_id, s_id, p_disc_id, s_disc_id,
-     peer_id_on_sub, peer_id_on_pub) = autils.create_discovery_pair(
-        p_dut,
-        s_dut,
-        p_config=autils.add_ranging_to_pub(autils.create_discovery_config(
-            self.SERVICE_NAME, aconsts.PUBLISH_TYPE_UNSOLICITED), True),
-        s_config=autils.add_ranging_to_pub(autils.create_discovery_config(
-            self.SERVICE_NAME, aconsts.SUBSCRIBE_TYPE_PASSIVE), True),
-        device_startup_offset=self.device_startup_offset,
-        msg_id=self.get_next_msg_id())
+        (p_id, s_id, p_disc_id, s_disc_id, peer_id_on_sub,
+         peer_id_on_pub) = autils.create_discovery_pair(
+             p_dut,
+             s_dut,
+             p_config=autils.add_ranging_to_pub(
+                 autils.create_discovery_config(
+                     self.SERVICE_NAME, aconsts.PUBLISH_TYPE_UNSOLICITED),
+                 True),
+             s_config=autils.add_ranging_to_pub(
+                 autils.create_discovery_config(
+                     self.SERVICE_NAME, aconsts.SUBSCRIBE_TYPE_PASSIVE), True),
+             device_startup_offset=self.device_startup_offset,
+             msg_id=self.get_next_msg_id())
 
-    results = []
-    start_clock = time.time()
-    iterations_done = 0
-    run_time = 0
-    while iterations_done < self.stress_test_min_iteration_count or (
-            self.stress_test_target_run_time_sec != 0
-        and run_time < self.stress_test_target_run_time_sec):
-      results.append(self.run_rtt_discovery(p_dut, resp_peer_id=peer_id_on_pub))
-      results.append(self.run_rtt_discovery(s_dut, resp_peer_id=peer_id_on_sub))
+        results = []
+        start_clock = time.time()
+        iterations_done = 0
+        run_time = 0
+        while iterations_done < self.stress_test_min_iteration_count or (
+                self.stress_test_target_run_time_sec != 0
+                and run_time < self.stress_test_target_run_time_sec):
+            results.append(
+                self.run_rtt_discovery(p_dut, resp_peer_id=peer_id_on_pub))
+            results.append(
+                self.run_rtt_discovery(s_dut, resp_peer_id=peer_id_on_sub))
 
-      iterations_done = iterations_done + 1
-      run_time = time.time() - start_clock
+            iterations_done = iterations_done + 1
+            run_time = time.time() - start_clock
 
-    stats = rutils.extract_stats(results, self.rtt_reference_distance_mm,
-                                 self.rtt_reference_distance_margin_mm,
-                                 self.rtt_min_expected_rssi_dbm,
-                                 summary_only=True)
-    self.log.debug("Stats: %s", stats)
-    asserts.assert_true(stats['num_no_results'] == 0,
-                        "Missing (timed-out) results", extras=stats)
-    asserts.assert_false(stats['any_lci_mismatch'],
-                         "LCI mismatch", extras=stats)
-    asserts.assert_false(stats['any_lcr_mismatch'],
-                         "LCR mismatch", extras=stats)
-    asserts.assert_equal(stats['num_invalid_rssi'], 0, "Invalid RSSI",
-                         extras=stats)
-    asserts.assert_true(
-        stats['num_failures'] <=
-        self.rtt_max_failure_rate_two_sided_rtt_percentage
-        * stats['num_results'] / 100,
-        "Failure rate is too high", extras=stats)
-    asserts.assert_true(
-        stats['num_range_out_of_margin']
-        <= self.rtt_max_margin_exceeded_rate_two_sided_rtt_percentage
-        * stats['num_success_results'] / 100,
-        "Results exceeding error margin rate is too high", extras=stats)
+        stats = rutils.extract_stats(
+            results,
+            self.rtt_reference_distance_mm,
+            self.rtt_reference_distance_margin_mm,
+            self.rtt_min_expected_rssi_dbm,
+            summary_only=True)
+        self.log.debug("Stats: %s", stats)
+        asserts.assert_true(
+            stats['num_no_results'] == 0,
+            "Missing (timed-out) results",
+            extras=stats)
+        asserts.assert_false(
+            stats['any_lci_mismatch'], "LCI mismatch", extras=stats)
+        asserts.assert_false(
+            stats['any_lcr_mismatch'], "LCR mismatch", extras=stats)
+        asserts.assert_equal(
+            stats['num_invalid_rssi'], 0, "Invalid RSSI", extras=stats)
+        asserts.assert_true(
+            stats['num_failures'] <=
+            self.rtt_max_failure_rate_two_sided_rtt_percentage *
+            stats['num_results'] / 100,
+            "Failure rate is too high",
+            extras=stats)
+        asserts.assert_true(
+            stats['num_range_out_of_margin'] <=
+            self.rtt_max_margin_exceeded_rate_two_sided_rtt_percentage *
+            stats['num_success_results'] / 100,
+            "Results exceeding error margin rate is too high",
+            extras=stats)
diff --git a/acts/tests/sample/BudsTest.py b/acts/tests/sample/BudsTest.py
new file mode 100644
index 0000000..ba1a04c
--- /dev/null
+++ b/acts/tests/sample/BudsTest.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python3.4
+#
+#   Copyright 2016 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+from acts.base_test import BaseTestClass
+
+
+class BudsTest(BaseTestClass):
+    def __init__(self, controllers):
+        BaseTestClass.__init__(self, controllers)
+        self.dut = self.buds_devices[0]
+
+    def test_make_toast(self):
+        self.log.info('Battery Level: %s', self.dut.get_battery_level())
+        self.log.info('Gas Gauge Current: %s', self.dut.get_gas_gauge_current())
+        self.log.info('Gas Gauge Voltage: %s', self.dut.get_gas_gauge_voltage())
+        self.log.info('Serial Log Dump: %s', self.dut.get_serial_log())