Catch StopIteration to comply with PEP 479 am: 61e2c1086a am: d6873adde4 am: e416085a0e

Original change: https://android-review.googlesource.com/c/platform/tools/test/connectivity/+/2015655

Change-Id: I22ac785d0d7caabc1bf14ef9ef52580213ababba
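For reference, the hunks below cover attenuator retries, the Bits collection refactoring, and new GNSS simulator support; the StopIteration handling named in the subject is not visible in this excerpt. A minimal, hypothetical sketch of what PEP 479 compliance looks like (under Python 3.7+, a StopIteration escaping a generator body is re-raised as RuntimeError, so generators must catch it and return):

    def read_values(lines):
        """Yield one stripped line at a time from an iterable of lines."""
        it = iter(lines)
        while True:
            try:
                line = next(it)
            except StopIteration:
                # PEP 479: return instead of letting StopIteration escape
                # the generator body (it would become a RuntimeError).
                return
            yield line.strip()
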
diff --git a/acts/framework/acts/controllers/attenuator_lib/_tnhelper.py b/acts/framework/acts/controllers/attenuator_lib/_tnhelper.py
index 348cab1..b7e0f8f 100644
--- a/acts/framework/acts/controllers/attenuator_lib/_tnhelper.py
+++ b/acts/framework/acts/controllers/attenuator_lib/_tnhelper.py
@@ -79,7 +79,7 @@
         """
         logging.debug('Diagnosing telnet connection')
         try:
-            job_result = job.run('ping {} -c 5 -i 0.1'.format(
+            job_result = job.run('ping {} -c 5 -i 0.2'.format(
                 self._ip_address))
         except:
             logging.error("Unable to ping telnet server.")
diff --git a/acts/framework/acts/controllers/attenuator_lib/minicircuits/http.py b/acts/framework/acts/controllers/attenuator_lib/minicircuits/http.py
index acde271..95fa0ba 100644
--- a/acts/framework/acts/controllers/attenuator_lib/minicircuits/http.py
+++ b/acts/framework/acts/controllers/attenuator_lib/minicircuits/http.py
@@ -35,7 +35,6 @@
     With the exception of HTTP-specific commands, all functionality is defined
     by the AttenuatorInstrument class.
     """
-
     def __init__(self, num_atten=1):
         super(AttenuatorInstrument, self).__init__(num_atten)
         self._ip_address = None
@@ -86,7 +85,7 @@
         """
         pass
 
-    def set_atten(self, idx, value, strict=True, **_):
+    def set_atten(self, idx, value, strict=True, retry=False, **_):
         """This function sets the attenuation of an attenuator given its index
         in the instrument.
 
@@ -98,6 +97,7 @@
             strict: if True, function raises an error when given out of
                 bounds attenuation values, if false, the function sets out of
                 bounds values to 0 or max_atten.
+            retry: if True, the command will be retried once on an invalid response
 
         Raises:
             InvalidDataError if the attenuator does not respond with the
@@ -112,20 +112,25 @@
                              value)
         # The actual device uses one-based index for channel numbers.
         att_req = urllib.request.urlopen(
-            'http://{}:{}/CHAN:{}:SETATT:{}'.format(
-                self._ip_address, self._port, idx + 1, value),
+            'http://{}:{}/CHAN:{}:SETATT:{}'.format(self._ip_address,
+                                                    self._port, idx + 1,
+                                                    value),
             timeout=self._timeout)
-        att_resp = att_req.read().decode('utf-8')
+        att_resp = att_req.read().decode('utf-8').strip()
         if att_resp != '1':
-            raise attenuator.InvalidDataError(
-                'Attenuator returned invalid data. Attenuator returned: {}'.
-                format(att_resp))
+            if retry:
+                self.set_atten(idx, value, strict, retry=False)
+            else:
+                raise attenuator.InvalidDataError(
+                    'Attenuator returned invalid data. Attenuator returned: {}'
+                    .format(att_resp))
 
-    def get_atten(self, idx, **_):
+    def get_atten(self, idx, retry=False, **_):
         """Returns the current attenuation of the attenuator at the given index.
 
         Args:
             idx: The index of the attenuator.
+            retry: if True, the command will be retried once on an invalid response
 
         Raises:
             InvalidDataError if the attenuator does not respond with the
@@ -137,15 +142,17 @@
         if not (0 <= idx < self.num_atten):
             raise IndexError('Attenuator index out of range!', self.num_atten,
                              idx)
-        att_req = urllib.request.urlopen(
-            'http://{}:{}/CHAN:{}:ATT?'.format(self._ip_address, self.port,
-                                               idx + 1),
-            timeout=self._timeout)
+        att_req = urllib.request.urlopen('http://{}:{}/CHAN:{}:ATT?'.format(
+            self._ip_address, self.port, idx + 1),
+                                         timeout=self._timeout)
         att_resp = att_req.read().decode('utf-8')
         try:
             atten_val = float(att_resp)
         except:
-            raise attenuator.InvalidDataError(
-                'Attenuator returned invalid data. Attenuator returned: {}'.
-                format(att_resp))
+            if retry:
+                return self.get_atten(idx, retry=False)
+            else:
+                raise attenuator.InvalidDataError(
+                    'Attenuator returned invalid data. Attenuator returned: {}'
+                    .format(att_resp))
         return atten_val
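
The retry flag above re-issues a failed command exactly once before raising. A standalone sketch of the same retry-once pattern (the helper name and the ValueError check are illustrative, not part of this change):

    def query_with_retry(send_once, parse, retry=True):
        """Send a command, parse the reply, and retry a single time on bad data."""
        response = send_once()
        try:
            return parse(response)
        except ValueError:
            if retry:
                return query_with_retry(send_once, parse, retry=False)
            raise

    # e.g. query_with_retry(lambda: '3.5', float) -> 3.5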
diff --git a/acts/framework/acts/controllers/bits.py b/acts/framework/acts/controllers/bits.py
index 5194f92..d89a9b3 100644
--- a/acts/framework/acts/controllers/bits.py
+++ b/acts/framework/acts/controllers/bits.py
@@ -1,9 +1,9 @@
 """Module managing the required definitions for using the bits power monitor"""
 
-from datetime import datetime
 import logging
 import os
 import time
+import uuid
 
 from acts import context
 from acts.controllers import power_metrics
@@ -29,6 +29,32 @@
     return [bits.config for bits in bitses]
 
 
+class BitsError(Exception):
+    pass
+
+
+class _BitsCollection(object):
+    """Object that represents a bits collection
+
+    Attributes:
+        name: The name given to the collection.
+        markers_buffer: A list of un-flushed markers; each marker is
+        represented by a two-element tuple with the format
+        (<nanoseconds_since_epoch or datetime>, <text>).
+        monsoon_output_path: A path where monsoon-like data should be stored.
+        Bits uses this path to attempt data extraction in monsoon format; if
+        this parameter is left as None, no such extraction is attempted.
+    """
+
+    def __init__(self, name, monsoon_output_path=None):
+        self.monsoon_output_path = monsoon_output_path
+        self.name = name
+        self.markers_buffer = []
+
+    def add_marker(self, timestamp, marker_text):
+        self.markers_buffer.append((timestamp, marker_text))
+
+
 def _transform_name(bits_metric_name):
     """Transform bits metrics names to a more succinct version.
 
@@ -62,7 +88,7 @@
     elif 'mV' == unit:
         suffix = 'avg_voltage'
     else:
-        logging.getLogger().warning('unknown unit type for unit %s' % unit)
+        logging.warning('unknown unit type for unit %s' % unit)
         suffix = ''
 
     if 'Monsoon' == rail:
@@ -87,7 +113,7 @@
         elif 'mV' == unit:
             unit_type = 'voltage'
         else:
-            logging.getLogger().warning('unknown unit type for unit %s' % unit)
+            logging.warning('unknown unit type for unit %s' % unit)
             continue
 
         name = _transform_name(sample['name'])
@@ -111,6 +137,10 @@
 
 
 class Bits(object):
+
+    ROOT_RAIL_KEY = 'RootRail'
+    ROOT_RAIL_DEFAULT_VALUE = 'Monsoon:mA'
+
     def __init__(self, index, config):
         """Creates an instance of a bits controller.
 
@@ -139,17 +169,26 @@
                             'serial': 'serial_2'
                         }
                     ]
+                    // optional
+                    'RootRail': 'Monsoon:mA'
                 }
         """
         self.index = index
         self.config = config
         self._service = None
         self._client = None
+        self._active_collection = None
+        self._collections_counter = 0
+        self._root_rail = config.get(self.ROOT_RAIL_KEY,
+                                     self.ROOT_RAIL_DEFAULT_VALUE)
 
     def setup(self, *_, registry=None, **__):
         """Starts a bits_service in the background.
 
-        This function needs to be
+        This function needs to be called with either a registry or after calling
+        power_monitor.update_registry, and it needs to be called before any other
+        method in this class.
+
         Args:
             registry: A dictionary with files used by bits. Format:
                 {
@@ -208,6 +247,8 @@
             'bits_service_out_%s.txt' % self.index)
         service_name = 'bits_config_%s' % self.index
 
+        self._active_collection = None
+        self._collections_counter = 0
         self._service = bits_service.BitsService(config,
                                                  bits_service_binary,
                                                  output_log,
@@ -219,7 +260,7 @@
                                               config)
         # this call makes sure that the client can interact with the server.
         devices = self._client.list_devices()
-        logging.getLogger().debug(devices)
+        logging.debug(devices)
 
     def disconnect_usb(self, *_, **__):
         self._client.disconnect_usb()
@@ -227,16 +268,29 @@
     def connect_usb(self, *_, **__):
         self._client.connect_usb()
 
-    def measure(self, *_, measurement_args=None, **__):
+    def measure(self, *_, measurement_args=None,
+                measurement_name=None, monsoon_output_path=None,
+                **__):
         """Blocking function that measures power through bits for the specified
         duration. Results need to be consulted through other methods such as
-        get_metrics or export_to_csv.
+        get_metrics, or by post-processing files like the ones
+        generated at monsoon_output_path after calling `release_resources`.
 
         Args:
             measurement_args: A dictionary with the following structure:
                 {
                    'duration': <seconds to measure for>
+                   'hz': <samples per second>
+                   'measure_after_seconds': <sleep time before measurement>
                 }
+                The actual number of samples per second is limited by the
+                bits configuration. The value of hz is defaulted to 1000.
+            measurement_name: A name to give to the measurement (which is also
+                used as the Bits collection name). Bits collection names (and
+                therefore measurement names) need to be unique within the
+                context of a Bits object.
+            monsoon_output_path: If provided, this path is used to generate
+                a monsoon-like formatted file at the release_resources step.
         """
         if measurement_args is None:
             raise ValueError('measurement_args can not be left undefined')
@@ -245,12 +299,41 @@
         if duration is None:
             raise ValueError(
                 'duration can not be left undefined within measurement_args')
-        self._client.start_collection()
+
+        hz = measurement_args.get('hz', 1000)
+
+        # Delay the start of the measurement if an offset is required
+        measure_after_seconds = measurement_args.get('measure_after_seconds')
+        if measure_after_seconds:
+            time.sleep(measure_after_seconds)
+
+        if self._active_collection:
+            raise BitsError(
+                'Attempted to start a collection while there is still an '
+                'active one. Active collection: %s'
+                % self._active_collection.name)
+
+        self._collections_counter = self._collections_counter + 1
+        # The name gets a random 8-character salt suffix because the Bits
+        # client has a bug where files with the same name are considered to be
+        # the same collection and it won't load two files with the same name.
+        # b/153170987 b/153944171
+        if not measurement_name:
+            measurement_name = 'bits_collection_%s_%s' % (
+                str(self._collections_counter), str(uuid.uuid4())[0:8])
+
+        self._active_collection = _BitsCollection(measurement_name,
+                                                  monsoon_output_path)
+        self._client.start_collection(self._active_collection.name,
+                                      default_sampling_rate=hz)
         time.sleep(duration)
 
     def get_metrics(self, *_, timestamps=None, **__):
         """Gets metrics for the segments delimited by the timestamps dictionary.
 
+        Must be called before releasing resources; otherwise it will fail when
+        adding markers to the collection.
+
         Args:
             timestamps: A dictionary of the shape:
                 {
@@ -276,6 +359,9 @@
         metrics = {}
 
         for segment_name, times in timestamps.items():
+            if 'start' not in times or 'end' not in times:
+                continue
+
             start = times['start']
             end = times['end']
 
@@ -285,18 +371,96 @@
             # The preferred way for new calls to this function should be using
             # datetime instead which is unambiguous
             if isinstance(start, (int, float)):
-                start = times['start'] * 1e6
+                start = start * 1e6
             if isinstance(end, (int, float)):
-                end = times['end'] * 1e6
+                end = end * 1e6
 
-            self._client.add_marker(start, 'start - %s' % segment_name)
-            self._client.add_marker(end, 'end - %s' % segment_name)
-            raw_metrics = self._client.get_metrics(start, end)
+            raw_metrics = self._client.get_metrics(self._active_collection.name,
+                                                   start=start, end=end)
+            self._add_marker(start, 'start - %s' % segment_name)
+            self._add_marker(end, 'end - %s' % segment_name)
             metrics[segment_name] = _raw_data_to_metrics(raw_metrics)
         return metrics
 
+    def _add_marker(self, timestamp, marker_text):
+        if not self._active_collection:
+            raise BitsError(
+                'markers can not be added without an active collection')
+        self._active_collection.add_marker(timestamp, marker_text)
+
     def release_resources(self):
-        self._client.stop_collection()
+        """Performs all the cleanup and export tasks.
+
+        Because of the way Bits is interfaced, several tasks can not be
+        performed while a collection is still active (like exporting the data)
+        and others can only take place while the collection is still active
+        (like adding markers to a collection).
+
+        To work around this workflow, collections started with the 'measure'
+        method are not actually stopped when that method returns; they are
+        only stopped once this method is called.
+
+        All the export files (.7z.bits and monsoon-formatted file) are also
+        generated in this method.
+        """
+        if not self._active_collection:
+            raise BitsError(
+                'Attempted to stop a collection without starting one')
+        self._client.add_markers(self._active_collection.name,
+                                 self._active_collection.markers_buffer)
+        self._client.stop_collection(self._active_collection.name)
+
+        export_file = os.path.join(
+            context.get_current_context().get_full_output_path(),
+            '%s.7z.bits' % self._active_collection.name)
+        self._client.export(self._active_collection.name, export_file)
+        if self._active_collection.monsoon_output_path:
+            self._attempt_monsoon_format()
+        self._active_collection = None
+
+    def _attempt_monsoon_format(self):
+        """Attempts to create a monsoon-formatted file.
+
+        In the case where there is not enough information to retrieve a
+        monsoon-like file, this function will do nothing.
+        """
+        available_channels = self._client.list_channels(
+            self._active_collection.name)
+        milli_amps_channel = None
+
+        for channel in available_channels:
+            if channel.endswith(self._root_rail):
+                milli_amps_channel = self._root_rail
+                break
+
+        if milli_amps_channel is None:
+            logging.debug('No monsoon equivalent channels were found when '
+                          'attempting to recreate monsoon file format. '
+                          'Available channels were: %s',
+                          str(available_channels))
+            return
+
+        logging.debug('Recreating monsoon file format from channel: %s',
+                      milli_amps_channel)
+        self._client.export_as_monsoon_format(
+            self._active_collection.monsoon_output_path,
+            self._active_collection.name,
+            milli_amps_channel)
+
+    def get_waveform(self, file_path=None):
+        """Parses a file generated in release_resources.
+
+        Args:
+            file_path: Path to a waveform file.
+
+        Returns:
+            A list of tuples in which the first element is a timestamp and the
+            second element is the sampled current at that time.
+        """
+        if file_path is None:
+            raise ValueError('file_path can not be None')
+
+        return list(power_metrics.import_raw_data(file_path))
 
     def teardown(self):
         if self._service is None:
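
Taken together, the new Bits workflow is: measure() starts a named collection, get_metrics() buffers markers and extracts aggregates, and release_resources() flushes markers, stops the collection, and exports files. A hedged usage sketch; example_config, example_registry, start_dt/end_dt, and the paths are placeholders:

    bits = Bits(index=0, config=example_config)        # example_config is assumed
    bits.setup(registry=example_registry)              # example_registry is assumed
    bits.measure(measurement_args={'duration': 30, 'hz': 1000},
                 measurement_name='screen_off_idle',
                 monsoon_output_path='/tmp/idle.monsoon')
    metrics = bits.get_metrics(
        timestamps={'idle': {'start': start_dt, 'end': end_dt}})  # datetimes preferred
    bits.release_resources()   # flushes markers, stops, exports .7z.bits / monsoon file
    samples = bits.get_waveform('/tmp/idle.monsoon')
    bits.teardown()
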
diff --git a/acts/framework/acts/controllers/bits_lib/bits_client.py b/acts/framework/acts/controllers/bits_lib/bits_client.py
index 4c8e740..f33c44b 100644
--- a/acts/framework/acts/controllers/bits_lib/bits_client.py
+++ b/acts/framework/acts/controllers/bits_lib/bits_client.py
@@ -14,15 +14,13 @@
 #   See the License for the specific language governing permissions and
 #   limitations under the License.
 
-import logging
-import os
-import uuid
-import tempfile
-import yaml
+import csv
 from datetime import datetime
+import logging
+import tempfile
 
 from acts.libs.proc import job
-from acts import context
+import yaml
 
 
 class BitsClientError(Exception):
@@ -52,26 +50,6 @@
                      'nanoseconds.' % type(timestamp))
 
 
-class _BitsCollection(object):
-    """Object that represents a bits collection
-
-    Attributes:
-        name: The name given to the collection.
-        markers_buffer: An array of un-flushed markers, each marker is
-        represented by a bi-dimensional tuple with the format
-        (<nanoseconds_since_epoch or datetime>, <text>).
-    """
-    def __init__(self, name):
-        self.name = name
-        self.markers_buffer = []
-
-    def add_marker(self, timestamp, marker_text):
-        self.markers_buffer.append((timestamp, marker_text))
-
-    def clear_markers_buffer(self):
-        self.markers_buffer.clear()
-
-
 class BitsClient(object):
     """Helper class to issue bits' commands"""
 
@@ -89,115 +67,159 @@
         self._binary = binary
         self._service = service
         self._server_config = service_config
-        self._active_collection = None
-        self._collections_counter = 0
 
     def _acquire_monsoon(self):
         """Gets hold of a Monsoon so no other processes can use it.
         Only works if there is a monsoon."""
-        cmd = [self._binary,
-               '--port',
-               self._service.port,
-               '--collector',
-               'Monsoon',
-               '--collector_cmd',
-               'acquire_monsoon']
-        self._log.info('acquiring monsoon')
-        job.run(cmd, timeout=10)
+        self._log.debug('acquiring monsoon')
+        self.run_cmd('--collector',
+                     'Monsoon',
+                     '--collector_cmd',
+                     'acquire_monsoon', timeout=10)
 
     def _release_monsoon(self):
-        cmd = [self._binary,
-               '--port',
-               self._service.port,
-               '--collector',
-               'Monsoon',
-               '--collector_cmd',
-               'release_monsoon']
-        self._log.info('releasing monsoon')
-        job.run(cmd, timeout=10)
+        self._log.debug('releasing monsoon')
+        self.run_cmd('--collector',
+                     'Monsoon',
+                     '--collector_cmd',
+                     'release_monsoon', timeout=10)
 
-    def _export(self):
-        collection_path = os.path.join(
-            context.get_current_context().get_full_output_path(),
-            '%s.7z.bits' % self._active_collection.name)
-        cmd = [self._binary,
-               '--port',
-               self._service.port,
-               '--name',
-               self._active_collection.name,
-               '--ignore_gaps',
-               '--export',
-               '--export_path',
-               collection_path]
-        self._log.info('exporting collection %s to %s',
-                       self._active_collection.name,
-                       collection_path)
-        job.run(cmd, timeout=600)
-
-    def _flush_markers(self):
-        for ts, marker in sorted(self._active_collection.markers_buffer,
-                                 key=lambda x: x[0]):
-            cmd = [self._binary,
-                   '--port',
-                   self._service.port,
-                   '--name',
-                   self._active_collection.name,
-                   '--log_ts',
-                   str(_to_ns(ts)),
-                   '--log',
-                   marker]
-            job.run(cmd, timeout=10)
-        self._active_collection.clear_markers_buffer()
-
-    def add_marker(self, timestamp, marker_text):
-        """Buffers a marker for the active collection.
-
-        Bits does not allow inserting markers with timestamps out of order.
-        The buffer of markers will be flushed when the collection is stopped to
-        ensure all the timestamps are input in order.
+    def run_cmd(self, *args, timeout=60):
+        """Executes a generic bits.par command.
 
         Args:
-            timestamp: Numerical nanoseconds since epoch or datetime.
-            marker_text: A string to label this marker with.
+            args: A bits.par command as a tokenized array. The path to the
+              binary and the service port are provided by default; args should
+              only contain the remaining tokens of the desired command.
+            timeout: Number of seconds to wait for the command to finish before
+              forcibly killing it.
         """
-        if not self._active_collection:
-            raise BitsClientError(
-                'markers can not be added without an active collection')
-        self._active_collection.add_marker(timestamp, marker_text)
+        result = job.run([self._binary, '--port',
+                          self._service.port] + [str(arg) for arg in args],
+                         timeout=timeout)
+        return result.stdout
 
-    def get_metrics(self, start, end):
+    def export(self, collection_name, path):
+        """Exports a collection to its bits persistent format.
+
+        Exported files can be shared and opened through the Bits UI.
+
+        Args:
+            collection_name: Collection to be exported.
+            path: Where the resulting file should be created. Bits requires that
+            the resulting file ends in .7z.bits.
+        """
+        if not path.endswith('.7z.bits'):
+            raise BitsClientError('Bits\' collections can only be exported to '
+                                  'files ending in .7z.bits, got %s' % path)
+        self._log.debug('exporting collection %s to %s',
+                        collection_name,
+                        path)
+        self.run_cmd('--name',
+                     collection_name,
+                     '--ignore_gaps',
+                     '--export',
+                     '--export_path',
+                     path,
+                     timeout=600)
+
+    def export_as_csv(self, channels, collection_name, output_file):
+        """Export bits data as CSV.
+
+        Writes the selected channel data to the given output_file. Note that
+        the first line of the file contains headers.
+
+        Args:
+          channels: A list of string pattern matches for the channels to be
+            retrieved. For example, ":mW" will export all power channels,
+            ":mV" will export all voltage channels, and "C1_01__" will export
+            power/voltage/current for the first rail of connector 1.
+          collection_name: A string for a collection that is sampling.
+          output_file: A string file path where the CSV will be written.
+        """
+        channels_arg = ','.join(channels)
+        cmd = ['--csvfile',
+               output_file,
+               '--name',
+               collection_name,
+               '--ignore_gaps',
+               '--csv_rawtimestamps',
+               '--channels',
+               channels_arg]
+        if self._server_config.has_virtual_metrics_file:
+            cmd = cmd + ['--vm_file', 'default']
+        self._log.debug(
+            'exporting csv for collection %s to %s, with channels %s',
+            collection_name, output_file, channels_arg)
+        self.run_cmd(*cmd, timeout=600)
+
+    def add_markers(self, collection_name, markers):
+        """Appends markers to a collection.
+
+        These markers are displayed in the Bits UI and are useful to label
+        important test events.
+
+        Markers can only be added to collections that have not been
+        closed / stopped. Markers need to be added in chronological order.
+        This function ensures that at least the markers added in each
+        call are sorted in chronological order, but if this function
+        is called multiple times, it is up to the user to ensure that
+        subsequent batches of markers carry timestamps higher (newer)
+        than all the markers passed in previous calls to this function.
+
+        Args:
+            collection_name: The name of the collection to add markers to.
+            markers: A list of tuples of the shape:
+
+             [(<nano_seconds_since_epoch or datetime>, <marker text>),
+              (<nano_seconds_since_epoch or datetime>, <marker text>),
+              (<nano_seconds_since_epoch or datetime>, <marker text>),
+              ...
+            ]
+        """
+        # sorts markers in chronological order before adding them. This is
+        # required by go/pixel-bits
+        for ts, marker in sorted(markers, key=lambda x: _to_ns(x[0])):
+            self._log.debug('Adding marker at %s: %s', str(ts), marker)
+            self.run_cmd('--name',
+                         collection_name,
+                         '--log_ts',
+                         str(_to_ns(ts)),
+                         '--log',
+                         marker,
+                         timeout=10)
+
+    def get_metrics(self, collection_name, start=None, end=None):
         """Extracts metrics for a period of time.
 
         Args:
+            collection_name: The name of the collection to get metrics from
             start: Numerical nanoseconds since epoch until the start of the
-            period of interest or datetime.
+            period of interest or datetime. If not provided, start will be the
+            beginning of the collection.
             end: Numerical nanoseconds since epoch until the end of the
-            period of interest or datetime.
+            period of interest or datetime. If not provided, end will be the
+            end of the collection.
         """
-        if not self._active_collection:
-            raise BitsClientError(
-                'metrics can not be collected without an active collection')
-
         with tempfile.NamedTemporaryFile(prefix='bits_metrics') as tf:
-            cmd = [self._binary,
-                   '--port',
-                   self._service.port,
-                   '--name',
-                   self._active_collection.name,
+            cmd = ['--name',
+                   collection_name,
                    '--ignore_gaps',
-                   '--abs_start_time',
-                   str(_to_ns(start)),
-                   '--abs_stop_time',
-                   str(_to_ns(end)),
                    '--aggregates_yaml_path',
                    tf.name]
+
+            if start is not None:
+                cmd = cmd + ['--abs_start_time', str(_to_ns(start))]
+            if end is not None:
+                cmd = cmd + ['--abs_stop_time', str(_to_ns(end))]
             if self._server_config.has_virtual_metrics_file:
                 cmd = cmd + ['--vm_file', 'default']
-            job.run(cmd)
+
+            self.run_cmd(*cmd)
             with open(tf.name) as mf:
                 self._log.debug(
                     'bits aggregates for collection %s [%s-%s]: %s' % (
-                        self._active_collection.name, start, end,
+                        collection_name, start, end,
                         mf.read()))
 
             with open(tf.name) as mf:
@@ -205,82 +227,53 @@
 
     def disconnect_usb(self):
         """Disconnects the monsoon's usb. Only works if there is a monsoon"""
-        cmd = [self._binary,
-               '--port',
-               self._service.port,
-               '--collector',
-               'Monsoon',
-               '--collector_cmd',
-               'usb_disconnect']
-        self._log.info('disconnecting monsoon\'s usb')
-        job.run(cmd, timeout=10)
+        self._log.debug('disconnecting monsoon\'s usb')
+        self.run_cmd('--collector',
+                     'Monsoon',
+                     '--collector_cmd',
+                     'usb_disconnect', timeout=10)
 
-    def start_collection(self, postfix=None):
+    def start_collection(self, collection_name, default_sampling_rate=1000):
         """Indicates Bits to start a collection.
 
         Args:
-            postfix: Optional argument that can be used to identify the
-            collection with.
+            collection_name: Name to give to the collection to be started.
+            Collection names must be unique at Bits' service level. If multiple
+            collections must be taken within the context of the same Bits'
+            service, ensure that each collection is given a different name.
+            default_sampling_rate: Samples per second to be collected.
         """
-        if self._active_collection:
-            raise BitsClientError(
-                'Attempted to start a collection while there is still an '
-                'active one. Active collection: %s',
-                self._active_collection.name)
-        self._collections_counter = self._collections_counter + 1
-        # The name gets a random 8 characters salt suffix because the Bits
-        # client has a bug where files with the same name are considered to be
-        # the same collection and it won't load two files with the same name.
-        # b/153170987 b/153944171
-        if not postfix:
-            postfix = str(self._collections_counter)
-        postfix = '%s_%s' % (postfix, str(uuid.uuid4())[0:8])
-        self._active_collection = _BitsCollection(
-            'bits_collection_%s' % postfix)
 
-        cmd = [self._binary,
-               '--port',
-               self._service.port,
-               '--name',
-               self._active_collection.name,
+        cmd = ['--name',
+               collection_name,
                '--non_blocking',
                '--time',
                ONE_YEAR,
                '--default_sampling_rate',
-               '1000',
-               '--disk_space_saver']
-        self._log.info('starting collection %s', self._active_collection.name)
-        job.run(cmd, timeout=10)
+               str(default_sampling_rate)]
+
+        if self._server_config.has_kibbles:
+            cmd = cmd + ['--disk_space_saver']
+
+        self._log.debug('starting collection %s', collection_name)
+        self.run_cmd(*cmd, timeout=10)
 
     def connect_usb(self):
         """Connects the monsoon's usb. Only works if there is a monsoon."""
-        cmd = [self._binary,
-               '--port',
-               self._service.port,
-               '--collector',
+        cmd = ['--collector',
                'Monsoon',
                '--collector_cmd',
                'usb_connect']
-        self._log.info('connecting monsoon\'s usb')
-        job.run(cmd, timeout=10)
+        self._log.debug('connecting monsoon\'s usb')
+        self.run_cmd(*cmd, timeout=10)
 
-    def stop_collection(self):
+    def stop_collection(self, collection_name):
         """Stops the active collection."""
-        if not self._active_collection:
-            raise BitsClientError(
-                'Attempted to stop a collection without starting one')
-        self._log.info('stopping collection %s', self._active_collection.name)
-        self._flush_markers()
-        cmd = [self._binary,
-               '--port',
-               self._service.port,
-               '--name',
-               self._active_collection.name,
-               '--stop']
-        job.run(cmd)
-        self._export()
-        self._log.info('stopped collection %s', self._active_collection.name)
-        self._active_collection = None
+        self._log.debug('stopping collection %s', collection_name)
+        self.run_cmd('--name',
+                     collection_name,
+                     '--stop')
+        self._log.debug('stopped collection %s', collection_name)
 
     def list_devices(self):
         """Lists devices managed by the bits_server this client is connected
@@ -289,11 +282,54 @@
         Returns:
             bits' output when called with --list devices.
         """
-        cmd = [self._binary,
-               '--port',
-               self._service.port,
-               '--list',
-               'devices']
         self._log.debug('listing devices')
-        result = job.run(cmd, timeout=20)
-        return result.stdout
+        result = self.run_cmd('--list', 'devices', timeout=20)
+        return result
+
+    def list_channels(self, collection_name):
+        """Finds all the available channels in a given collection.
+
+        Args:
+            collection_name: The name of the collection to get channels from.
+        """
+        metrics = self.get_metrics(collection_name)
+        return [channel['name'] for channel in metrics['data']]
+
+    def export_as_monsoon_format(self, dest_path, collection_name,
+                                 channel_pattern):
+        """Exports data from a collection in monsoon style.
+
+        This function exists because there are tools that have been built on
+        top of the monsoon format. To be able to leverage such tools we need
+        to make the data compliant with the format.
+
+        The monsoon format is:
+
+        <time_since_epoch_in_secs> <amps>
+
+        Args:
+            dest_path: Path where the resulting file will be generated.
+            collection_name: The name of the Bits' collection to export data
+            from.
+            channel_pattern: A regex that matches the Bits' channel to be used
+            as source of data. If there are multiple matching channels, only the
+            first one will be used. The channel is always assumed to be
+            expressed in milli-amps; the resulting format requires amps, so the
+            values coming from the first matching channel will always be
+            divided by 1000.
+        """
+        with tempfile.NamedTemporaryFile(prefix='bits_csv_') as tmon:
+            self.export_as_csv([channel_pattern], collection_name, tmon.name)
+
+            self._log.debug(
+                'massaging bits csv to monsoon format for collection'
+                ' %s', collection_name)
+            with open(tmon.name) as csv_file:
+                reader = csv.reader(csv_file)
+                headers = next(reader)
+                self._log.debug('csv headers %s', headers)
+                with open(dest_path, 'w') as dest:
+                    for row in reader:
+                        ts = float(row[0]) / 1e9
+                        amps = float(row[1]) / 1e3
+                        dest.write('%.7f %.12f\n' % (ts, amps))
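
The client now operates on named collections rather than a single active one. A hedged sketch of flushing buffered markers and closing a collection, assuming `client` is an already-constructed BitsClient; the collection name and timestamps are illustrative (both datetime objects and nanoseconds since epoch are accepted):

    from datetime import datetime

    client.add_markers('bits_collection_1_ab12cd34', [
        (datetime(2021, 6, 1, 10, 0, 0), 'start - screen_on'),
        (1622541605000000000, 'end - screen_on'),   # ns since epoch also accepted
    ])
    client.stop_collection('bits_collection_1_ab12cd34')
    client.export('bits_collection_1_ab12cd34', '/tmp/screen_on.7z.bits')
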
diff --git a/acts/framework/acts/controllers/bits_lib/bits_service_config.py b/acts/framework/acts/controllers/bits_lib/bits_service_config.py
index b17ff3b..cb2d219 100644
--- a/acts/framework/acts/controllers/bits_lib/bits_service_config.py
+++ b/acts/framework/acts/controllers/bits_lib/bits_service_config.py
@@ -62,8 +62,8 @@
             raise ValueError('Monsoon voltage can not be undefined. Received '
                              'config was: %s' % monsoon_config)
 
-        self.serial_num = monsoon_config['serial_num']
-        self.monsoon_voltage = monsoon_config['monsoon_voltage']
+        self.serial_num = int(monsoon_config['serial_num'])
+        self.monsoon_voltage = float(monsoon_config['monsoon_voltage'])
 
         self.config_dic = copy.deepcopy(DEFAULT_MONSOON_CONFIG_DICT)
         if float(self.serial_num) >= 20000:
diff --git a/acts/framework/acts/controllers/gnss_lib/GnssSimulator.py b/acts/framework/acts/controllers/gnss_lib/GnssSimulator.py
new file mode 100644
index 0000000..7f64164
--- /dev/null
+++ b/acts/framework/acts/controllers/gnss_lib/GnssSimulator.py
@@ -0,0 +1,200 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Python module for General abstract GNSS Simulator.
+@author: Clay Liao (jianhsiungliao@)
+"""
+from time import sleep
+from acts.controllers.spectracom_lib import gsg6
+from acts.controllers.spirent_lib import gss7000
+from acts import logger
+from acts.utils import ping
+from acts.libs.proc import job
+
+
+class AbstractGnssSimulator:
+    """General abstract GNSS Simulator"""
+
+    def __init__(self, simulator, ip_addr, ip_port, ip_port_ctrl=7717):
+        """Init AbstractGnssSimulator
+
+        Args:
+            simulator: GNSS simulator name,
+                Type, str
+                Option 'gss7000/gsg6'
+            ip_addr: IP Address.
+                Type, str
+            ip_port: TCPIP Port,
+                Type, str
+            ip_port_ctrl: TCPIP port,
+                Type, int
+                Default, 7717
+        """
+        self.simulator_name = str(simulator).lower()
+        self.ip_addr = ip_addr
+        self.ip_port = ip_port
+        self.ip_port_ctrl = ip_port_ctrl
+        self._logger = logger.create_tagged_trace_logger(
+            '%s %s:%s' % (simulator, self.ip_addr, self.ip_port))
+        if self.simulator_name == 'gsg6':
+            self._logger.info('GNSS simulator is GSG6')
+            self.simulator = gsg6.GSG6(self.ip_addr, self.ip_port)
+        elif self.simulator_name == 'gss7000':
+            self._logger.info('GNSS simulator is GSS7000')
+            self.simulator = gss7000.GSS7000(self.ip_addr, self.ip_port,
+                                             self.ip_port_ctrl)
+        else:
+            self._logger.error('No matched GNSS simulator')
+            raise AttributeError(
+                'The GNSS simulator in config file is {} which is not supported.'
+                .format(self.simulator_name))
+
+    def connect(self):
+        """Connect to GNSS Simulator"""
+        self._logger.debug('Connect to GNSS Simulator {}'.format(
+            self.simulator_name.upper()))
+        self.simulator.connect()
+
+    def close(self):
+        """Disconnect from GNSS Simulator"""
+        self._logger.debug('Disconnect from GNSS Simulator {}'.format(
+            self.simulator_name.upper()))
+        self.simulator.close()
+
+    def start_scenario(self, scenario=''):
+        """Start the running scenario.
+
+        Args:
+            scenario: path of scenario,
+                Type, str
+        """
+        self._logger.info('Start GNSS Scenario {}'.format(scenario))
+        self.simulator.start_scenario(scenario)
+
+    def stop_scenario(self):
+        """Stop the running scenario."""
+        self._logger.debug('Stop playing scenario')
+        self.simulator.stop_scenario()
+
+    def set_power(self, power_level=-130):
+        """Set scenario power level.
+        Args:
+            power_level: target power level in dBm for gsg6 or gss7000,
+                gsg6 power_level range is [-160, -65],
+                gss7000 power_level range is [-170, -115]
+                Type, float,
+        """
+        self.simulator.set_power(power_level)
+
+    def set_power_offset(self, gss7000_ant=1, pwr_offset=0):
+        """Set scenario power level offset based on reference level.
+           The default reference level is -130dBm for GPS L1.
+        Args:
+            gss7000_ant: target gss7000 RF port,
+                Type, int
+            pwr_offset: target power offset in dB,
+                Type, float
+        """
+        if self.simulator_name == 'gsg6':
+            power_level = -130 + pwr_offset
+            self.simulator.set_power(power_level)
+        elif self.simulator_name == 'gss7000':
+            self.simulator.set_power_offset(gss7000_ant, pwr_offset)
+        else:
+            self._logger.error('No GNSS simulator is available')
+
+    def set_scenario_power(self,
+                           power_level,
+                           sat_id='',
+                           sat_system='',
+                           freq_band=''):
+        """Set dynamic power for the running scenario.
+
+        Args:
+            power_level: transmit power level
+                Type, float.
+                Decimal, unit [dBm]
+            sat_id: set power level for specific satellite identifiers
+                Type, str.
+                Option
+                    For GSG-6: 'Gxx/Rxx/Exx/Cxx/Jxx/Ixx/Sxxx'
+                    where xx is satellite identifiers number
+                    e.g.: G10
+                    For GSS7000: Provide SVID.
+                Default, '', assumed All.
+            sat_system: to set power level for all Satellites
+                Type, str
+                Option [GPS, GLO, GAL, BDS, QZSS, IRNSS, SBAS]
+                Default, '', assumed All.
+            freq_band: Frequency band to set the power level
+                Type, str
+                Default, '', assumed to be L1.
+        Raises:
+            RuntimeError: raise when instrument does not support this function.
+        """
+        self.simulator.set_scenario_power(power_level=power_level,
+                                          sat_id=sat_id,
+                                          sat_system=sat_system,
+                                          freq_band=freq_band)
+
+    def toggle_scenario_power(self,
+                              toggle_onoff='ON',
+                              sat_id='',
+                              sat_system=''):
+        """Toggle ON OFF scenario.
+
+        Args:
+            toggle_onoff: turn on or off the satellites
+                Type, str. Option ON/OFF
+                Default, 'ON'
+            sat_id: satellite identifiers
+                Type, str.
+                Option 'Gxx/Rxx/Exx/Cxx/Jxx/Ixx/Sxxx'
+                where xx is satellite identifiers no.
+                e.g.: G10
+            sat_system: to toggle On/OFF for all Satellites
+                Type, str
+                Option 'GPS/GLO/GAL'
+        """
+        # TODO: [b/208719212] Currently only support GSG-6. Will implement GSS7000 feature.
+        if self.simulator_name == 'gsg6':
+            self.simulator.toggle_scenario_power(toggle_onoff=toggle_onoff,
+                                                 sat_id=sat_id,
+                                                 sat_system=sat_system)
+        else:
+            raise RuntimeError('{} does not support this function'.format(
+                self.simulator_name))
+
+    def ping_inst(self, retry=3, wait=1):
+        """Ping IP of instrument to check if the connection is stable.
+        Args:
+            retry: Retry times.
+                Type, int.
+                Default, 3.
+            wait: Wait time between each ping command when ping fail is met.
+                Type, int.
+                Default, 1.
+        Return:
+            True/False of ping result.
+        """
+        for i in range(retry):
+            ret = ping(job, self.ip_addr)
+            self._logger.debug(f'Ping return results: {ret}')
+            if ret.get('packet_loss') == '0':
+                return True
+            self._logger.warning(f'Failed to ping GNSS Simulator, attempt {i+1}')
+            sleep(wait)
+        return False
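
A hedged usage sketch of AbstractGnssSimulator; the address, port, scenario path, and power offset are placeholders:

    sim = AbstractGnssSimulator('gsg6', '192.168.1.10', '5050')
    sim.connect()
    if sim.ping_inst(retry=3, wait=1):
        sim.start_scenario('scenarios/open_sky.scen')   # hypothetical scenario path
        sim.set_power_offset(pwr_offset=-5)             # 5 dB below the -130 dBm reference
        sim.toggle_scenario_power('OFF', sat_id='G10')  # GSG-6 only for now
        sim.stop_scenario()
    sim.close()
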
diff --git a/acts/framework/acts/controllers/gnss_lib/__init__.py b/acts/framework/acts/controllers/gnss_lib/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/acts/framework/acts/controllers/gnss_lib/__init__.py
diff --git a/acts/framework/acts/controllers/iperf_server.py b/acts/framework/acts/controllers/iperf_server.py
index 78cb787..4be9d1e 100755
--- a/acts/framework/acts/controllers/iperf_server.py
+++ b/acts/framework/acts/controllers/iperf_server.py
@@ -243,8 +243,8 @@
         """
         if not self._has_data():
             return None
-        instantaneous_rates = self.instantaneous_rates[iperf_ignored_interval:
-                                                       -1]
+        instantaneous_rates = self.instantaneous_rates[
+            iperf_ignored_interval:-1]
         avg_rate = math.fsum(instantaneous_rates) / len(instantaneous_rates)
         sqd_deviations = ([(rate - avg_rate)**2
                            for rate in instantaneous_rates])
@@ -497,6 +497,18 @@
             self.start_ssh()
         utils.renew_linux_ip_address(self._ssh_session, self.test_interface)
 
+    def _cleanup_iperf_port(self):
+        """Checks and kills zombie iperf servers occupying intended port."""
+        iperf_check_cmd = ('netstat -tulpn | grep LISTEN | grep iperf3'
+                           ' | grep :{}').format(self.port)
+        iperf_check = self._ssh_session.run(iperf_check_cmd,
+                                            ignore_status=True)
+        iperf_check = iperf_check.stdout
+        if iperf_check:
+            logging.debug('Killing zombie server on port {}'.format(self.port))
+            iperf_pid = iperf_check.split(' ')[-1].split('/')[0]
+            self._ssh_session.run('kill -9 {}'.format(str(iperf_pid)))
+
     def start(self, extra_args='', tag='', iperf_binary=None):
         """Starts iperf server on specified machine and port.
 
@@ -513,6 +525,7 @@
 
         if not self._ssh_session:
             self.start_ssh()
+        self._cleanup_iperf_port()
         if not iperf_binary:
             logging.debug('No iperf3 binary specified.  '
                           'Assuming iperf3 is in the path.')
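
The zombie-server cleanup above greps netstat for an iperf3 process listening on the configured port and extracts the PID from the last column. A small sketch of that parsing step (the netstat line is illustrative):

    line = 'tcp6       0      0 :::5201      :::*      LISTEN      1234/iperf3'
    pid = line.split(' ')[-1].split('/')[0]   # -> '1234'
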
diff --git a/acts/framework/acts/controllers/power_metrics.py b/acts/framework/acts/controllers/power_metrics.py
index f4d9edd..98599fe 100644
--- a/acts/framework/acts/controllers/power_metrics.py
+++ b/acts/framework/acts/controllers/power_metrics.py
@@ -15,6 +15,7 @@
 #   limitations under the License.
 
 import math
+import numpy as np
 
 # Metrics timestamp keys
 START_TIMESTAMP = 'start'
@@ -166,6 +167,57 @@
             yield float(time[:-1]), float(sample)
 
 
+def generate_percentiles(monsoon_file, timestamps, percentiles):
+    """Generates percentile metrics for each segment in timestamps.
+
+    Args:
+        monsoon_file: monsoon-like file where each line has two
+            numbers separated by a space, in the format:
+            seconds_since_epoch amperes
+            seconds_since_epoch amperes
+        timestamps: dict following the output format of
+            instrumentation_proto_parser.get_test_timestamps()
+        percentiles: percentiles to be returned
+    """
+    if timestamps is None:
+        timestamps = {}
+    test_starts = {}
+    test_ends = {}
+    for seg_name, times in timestamps.items():
+        if START_TIMESTAMP in times and END_TIMESTAMP in times:
+            test_starts[seg_name] = Metric(
+                times[START_TIMESTAMP], TIME, MILLISECOND).to_unit(
+                SECOND).value
+            test_ends[seg_name] = Metric(
+                times[END_TIMESTAMP], TIME, MILLISECOND).to_unit(
+                SECOND).value
+
+    arrays = {}
+    for seg_name in test_starts:
+        arrays[seg_name] = []
+
+    with open(monsoon_file, 'r') as m:
+        for line in m:
+            timestamp = float(line.strip().split()[0])
+            value = float(line.strip().split()[1])
+            for seg_name in arrays.keys():
+                if test_starts[seg_name] <= timestamp <= test_ends[seg_name]:
+                    arrays[seg_name].append(value)
+
+    results = {}
+    for seg_name in arrays:
+        if len(arrays[seg_name]) == 0:
+            continue
+
+        pairs = zip(percentiles, np.percentile(arrays[seg_name],
+                                               percentiles))
+        results[seg_name] = [
+            Metric.amps(p[1], 'percentile_%s' % p[0]).to_unit(MILLIAMP) for p in
+            pairs
+        ]
+    return results
+
+
 def generate_test_metrics(raw_data, timestamps=None,
                           voltage=None):
     """Split the data into individual test metrics, based on the timestamps
@@ -185,28 +237,18 @@
     test_ends = {}
     test_metrics = {}
     for seg_name, times in timestamps.items():
-        test_metrics[seg_name] = PowerMetrics(voltage)
-        try:
+        if START_TIMESTAMP in times and END_TIMESTAMP in times:
+            test_metrics[seg_name] = PowerMetrics(voltage)
             test_starts[seg_name] = Metric(
                 times[START_TIMESTAMP], TIME, MILLISECOND).to_unit(
                 SECOND).value
-        except KeyError:
-            raise ValueError(
-                'Missing start timestamp for test scenario "%s". Refer to '
-                'instrumentation_proto.txt for details.' % seg_name)
-        try:
             test_ends[seg_name] = Metric(
                 times[END_TIMESTAMP], TIME, MILLISECOND).to_unit(
                 SECOND).value
-        except KeyError:
-            raise ValueError(
-                'Missing end timestamp for test scenario "%s". Test '
-                'scenario may have terminated with errors. Refer to '
-                'instrumentation_proto.txt for details.' % seg_name)
 
     # Assign data to tests based on timestamps
     for timestamp, amps in raw_data:
-        for seg_name in timestamps:
+        for seg_name in test_metrics.keys():
             if test_starts[seg_name] <= timestamp <= test_ends[seg_name]:
                 test_metrics[seg_name].update_metrics(amps)
 
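
A hedged example of calling the new generate_percentiles helper; the file path and timestamps (milliseconds since epoch, matching the instrumentation proto parser output) are placeholders:

    results = generate_percentiles(
        monsoon_file='/tmp/idle.monsoon',
        timestamps={'idle': {'start': 1622541600000, 'end': 1622541630000}},
        percentiles=[50, 90, 99])
    # results maps each segment name to a list of Metric objects, one per
    # requested percentile, expressed in milliamps.
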
diff --git a/acts/framework/acts/controllers/spectracom_lib/gsg6.py b/acts/framework/acts/controllers/spectracom_lib/gsg6.py
index a1c30cc..ef381e2 100644
--- a/acts/framework/acts/controllers/spectracom_lib/gsg6.py
+++ b/acts/framework/acts/controllers/spectracom_lib/gsg6.py
@@ -1,18 +1,3 @@
-#!/usr/bin/env python3
-#
-#   Copyright 2019 - The Android Open Source Project
-#
-#   Licensed under the Apache License, Version 2.0 (the "License");
-#   you may not use this file except in compliance with the License.
-#   You may obtain a copy of the License at
-#
-#           http://www.apache.org/licenses/LICENSE-2.0
-#
-#   Unless required by applicable law or agreed to in writing, software
-#   distributed under the License is distributed on an "AS IS" BASIS,
-#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#   See the License for the specific language governing permissions and
-#   limitations under the License.
 """Python module for Spectracom/Orolia GSG-6 GNSS simulator."""
 
 from acts.controllers import abstract_inst
@@ -115,8 +100,8 @@
 
         self._send(':SOUR:POW ' + str(round(power_level, 1)))
 
-        infmsg = 'Set GSG-6 transmit power to "{}"'.format(
-            round(power_level, 1))
+        infmsg = 'Set GSG-6 transmit power to "{}"'.format(round(
+            power_level, 1))
         self._logger.debug(infmsg)
 
     def get_nmealog(self):
@@ -128,3 +113,107 @@
         nmea_data = self._query('SOUR:SCEN:LOG?')
 
         return nmea_data
+
+    def toggle_scenario_power(self,
+                              toggle_onoff='ON',
+                              sat_id='',
+                              sat_system=''):
+        """Toggle ON OFF scenario.
+
+        Args:
+            toggle_onoff: turn on or off the satellites
+                Type, str. Option ON/OFF
+                Default, 'ON'
+            sat_id: satellite identifiers
+                Type, str.
+                Option 'Gxx/Rxx/Exx/Cxx/Jxx/Ixx/Sxxx'
+                where xx is satellite identifiers no.
+                e.g.: G10
+            sat_system: to toggle On/OFF for all Satellites
+                Type, str
+                Option [GPS, GLO, GAL, BDS, QZSS, IRNSS, SBAS]
+        Raises:
+            GSG6Error: raise when toggle is not set.
+        """
+        if not sat_id and not sat_system:
+            self._send(':SOUR:SCEN:POW ' + str(toggle_onoff))
+            infmsg = 'Set GSG-6 Power to "{}"'.format(toggle_onoff)
+            self._logger.debug(infmsg)
+
+        elif sat_id and not sat_system:
+            self._send(':SOUR:SCEN:POW ' + str(sat_id) + ',' +
+                       str(toggle_onoff))
+            infmsg = ('Set GSG-6 Power to "{}" for "{}" satellite '
+                      'identifiers').format(toggle_onoff, sat_id)
+            self._logger.debug(infmsg)
+
+        elif not sat_id and sat_system:
+            self._send(':SOUR:SCEN:POW ' + str(sat_system) + ',' +
+                       str(toggle_onoff))
+            infmsg = 'Set GSG-6 Power to "{}" for "{}" satellite system'.format(
+                toggle_onoff, sat_system)
+            self._logger.debug(infmsg)
+
+        else:
+            errmsg = ('"toggle power" must have either of these values [ON/OFF],'
+                      ' current input is {}').format(str(toggle_onoff))
+            raise GSG6Error(error=errmsg, command='toggle_scenario_power')
+
+    def set_scenario_power(self,
+                           power_level,
+                           sat_id='',
+                           sat_system='',
+                           freq_band=''):
+        """Set dynamic power for the running scenario.
+
+        Args:
+            power_level: transmit power level
+                Type, float.
+                Decimal, unit [dBm]
+            sat_id: set power level for specific satellite identifiers
+                Type, str. Option
+                'Gxx/Rxx/Exx/Cxx/Jxx/Ixx/Sxxx'
+                where xx is satellite identifiers number
+                e.g.: G10
+            sat_system: to set power level for all Satellites
+                Type, str
+                Option [GPS, GLO, GAL, BDS, QZSS, IRNSS, SBAS]
+            freq_band: Frequency band to set the power level
+                Type, str
+                Option  [L1, L2, L5, ALL]
+                Default, '', assumed to be L1.
+        Raises:
+            GSG6Error: raise when power level is not in [-160, -65] range.
+        """
+        if freq_band == 'ALL':
+            if not -100 <= power_level <= 100:
+                errmsg = ('"power_level" must be within [-100, 100], for '
+                          '"freq_band"="ALL", current input is {}').format(
+                              str(power_level))
+                raise GSG6Error(error=errmsg, command='set_scenario_power')
+        else:
+            if not -160 <= power_level <= -65:
+                errmsg = ('"power_level" must be within [-160, -65], for '
+                          '"freq_band" != "ALL", current input is {}').format(
+                              str(power_level))
+                raise GSG6Error(error=errmsg, command='set_scenario_power')
+
+        if sat_id and not sat_system:
+            self._send(':SOUR:SCEN:POW ' + str(sat_id) + ',' +
+                       str(round(power_level, 1)) + ',' + str(freq_band))
+            infmsg = ('Set GSG-6 transmit power to "{}" for "{}" '
+                      'satellite id').format(round(power_level, 1), sat_id)
+            self._logger.debug(infmsg)
+
+        elif not sat_id and sat_system:
+            self._send(':SOUR:SCEN:POW ' + str(sat_system) + ',' +
+                       str(round(power_level, 1)) + ',' + str(freq_band))
+            infmsg = ('Set GSG-6 transmit power to "{}" for "{}" '
+                      'satellite system').format(round(power_level, 1),
+                                                 sat_system)
+            self._logger.debug(infmsg)
+
+        else:
+            errmsg = ('Exactly one of sat_id and sat_system must be specified, '
+                      'current input is sat_id "{}" and sat_system "{}"').format(
+                          sat_id, sat_system)
+            raise GSG6Error(error=errmsg, command='set_scenario_power')
diff --git a/acts/framework/acts/controllers/spirent_lib/gss7000.py b/acts/framework/acts/controllers/spirent_lib/gss7000.py
new file mode 100644
index 0000000..961d4e8
--- /dev/null
+++ b/acts/framework/acts/controllers/spirent_lib/gss7000.py
@@ -0,0 +1,490 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Python module for Spirent GSS7000 GNSS simulator.
+@author: Clay Liao (jianhsiungliao@)
+"""
+from time import sleep
+import xml.etree.ElementTree as ET
+from acts.controllers import abstract_inst
+
+
+def get_xml_text(xml_string='', tag=''):
+    """Parse xml from string and return specific tag
+
+        Args:
+            xml_string: xml string,
+                Type, Str.
+            tag: tag in xml,
+                Type, Str.
+
+        Returns:
+            text: Text content in the tag
+                Type, Str.
+        """
+    if xml_string and tag:
+        root = ET.fromstring(xml_string)
+        try:
+            # find() returns None for a missing tag, which surfaces as AttributeError.
+            text = str(root.find(tag).text).strip()
+        except (AttributeError, ValueError):
+            text = 'INVALID DATA'
+    else:
+        text = 'INVALID DATA'
+    return text
+
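+# Example (illustrative XML): get_xml_text('<msg><data>GSS7000</data></msg>', 'data')
+# returns 'GSS7000'; an empty input or a missing tag falls back to 'INVALID DATA'.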
+
+class GSS7000Error(abstract_inst.SocketInstrumentError):
+    """GSS7000 Instrument Error Class."""
+
+
+class AbstractInstGss7000(abstract_inst.SocketInstrument):
+    """Abstract instrument for  GSS7000"""
+
+    def _query(self, cmd):
+        """query instrument via Socket.
+
+        Args:
+            cmd: Command to send,
+                Type, Str.
+
+        Returns:
+            resp: Response from Instrument via Socket,
+                Type, Str.
+        """
+        self._send(cmd)
+        self._wait()
+        resp = self._recv()
+        return resp
+
+    def _wait(self, wait_time=1):
+        """wait function
+        Args:
+            wait_time: wait time in sec.
+                Type, int,
+                Default, 1.
+        """
+        sleep(wait_time)
+
+
+class GSS7000Ctrl(AbstractInstGss7000):
+    """GSS7000 control daemon class"""
+
+    def __init__(self, ip_addr, ip_port=7717):
+        """Init method for GSS7000 Control Daemon.
+
+        Args:
+            ip_addr: IP Address.
+                Type, str.
+            ip_port: TCPIP Port.
+                Type, int.
+        """
+        super().__init__(ip_addr, ip_port)
+        self.idn = 'Spirent-GSS7000 Control Daemon'
+
+    def connect(self):
+        """Init and Connect to GSS7000 Control Daemon."""
+        # Close any stale socket, then reconnect
+        self._close_socket()
+        self._connect_socket()
+        # Restart the GSS7000 engine: stop, wait, then start
+        self._query('STOP_ENGINE')
+        self._wait()
+        self._query('START_ENGINE')
+
+    def close(self):
+        """Close GSS7000 control daemon"""
+        self._close_socket()
+        self._logger.debug('Closed connection to GSS7000 control daemon')
+
+
+class GSS7000(AbstractInstGss7000):
+    """GSS7000 Class, inherted from abstract_inst SocketInstrument."""
+
+    def __init__(self, ip_addr, engine_ip_port=15650, ctrl_ip_port=7717):
+        """Init method for GSS7000.
+
+        Args:
+            ip_addr: IP Address.
+                Type, str.
+            engine_ip_port: TCPIP Port of the simulator engine.
+                Type, int.
+            ctrl_ip_port: TCPIP Port of the Control Daemon.
+                Type, int.
+        """
+        super().__init__(ip_addr, engine_ip_port)
+        self.idn = ''
+        self.connected = False
+        self.capability = []
+        self.gss7000_ctrl_daemon = GSS7000Ctrl(ip_addr, ctrl_ip_port)
+        # Close control daemon and engine sockets at the beginning
+        self.gss7000_ctrl_daemon._close_socket()
+        self._close_socket()
+
+    def connect(self):
+        """Connect GSS7000 engine daemon"""
+        # Connect control daemon socket
+        self._logger.debug('Connect to GSS7000')
+        self.gss7000_ctrl_daemon.connect()
+        # Connect to remote engine socket
+        self._wait()
+        self._connect_socket()
+        self.connected = True
+        self.get_hw_capability()
+
+    def close(self):
+        """Close GSS7000 engine daemon"""
+        # Close GSS7000 control daemon
+        self.gss7000_ctrl_daemon.close()
+        # Close GSS7000 engine daemon
+        self._close_socket()
+        self._logger.debug('Closed connection to GSS7000 engine daemon')
+
+    def _parse_hw_cap(self, xml):
+        """Parse GSS7000 hardware capability xml to list.
+            Args:
+                xml: hardware capability xml,
+                    Type, str.
+
+            Returns:
+                capability: Hardware capability list.
+                    Type, list.
+        """
+        root = ET.fromstring(xml)
+        capability_ls = list()
+        sig_cap_list = root.find('data').find('Signal_capabilities').findall(
+            'Signal')
+        for signal in sig_cap_list:
+            value = str(signal.text).rstrip().lstrip()
+            capability_ls.extend(value.upper().split(' '))
+        return capability_ls
+
+    def get_hw_capability(self):
+        """Check GSS7000 hardware capability
+
+            Returns:
+                capability: Hardware capability list,
+                    Type, list.
+        """
+        if self.connected:
+            capability_xml = self._query('GET_LICENCED_HARDWARE_CAPABILITY')
+            self.capability = self._parse_hw_cap(capability_xml)
+
+        return self.capability
+
+    def get_idn(self):
+        """Get the SimREPLAYplus Version
+
+        Returns:
+            SimREPLAYplus Version
+        """
+        idn_xml = self._query('*IDN?')
+        self.idn = get_xml_text(idn_xml, 'data')
+        return self.idn
+
+    def load_scenario(self, scenario=''):
+        """Load the scenario.
+
+        Args:
+            scenario: path of scenario,
+                Type, str
+
+        Returns:
+            True if the scenario was loaded.
+        """
+        if scenario == '':
+            errmsg = 'Missing scenario file'
+            raise GSS7000Error(error=errmsg, command='load_scenario')
+        self._logger.debug('Stopped the original scenario')
+        self._query('-,EN,1')
+        cmd = 'SC,' + scenario
+        self._logger.debug('Loading scenario')
+        self._query(cmd)
+        self._logger.debug('Scenario is loaded')
+        return True
+
+    def start_scenario(self, scenario=''):
+        """Load and Start the running scenario.
+
+        Args:
+            scenario: path of scenario,
+                Type, str
+        """
+        if scenario:
+            if self.load_scenario(scenario):
+                self._query('RU')
+            else:
+                infmsg = 'No scenario is loaded. Stop running scenario'
+                self._logger.debug(infmsg)
+        else:
+            # No scenario path given: run the currently loaded scenario.
+            self._query('RU')
+
+        if scenario:
+            infmsg = 'Started running scenario {}'.format(scenario)
+        else:
+            infmsg = 'Started running current scenario'
+
+        self._logger.debug(infmsg)
+
+    def get_scenario_name(self):
+        """Get current scenario name"""
+        sc_name_xml = self._query('SC_NAME')
+        return get_xml_text(sc_name_xml, 'data')
+
+    def stop_scenario(self):
+        """Stop the running scenario."""
+        self._query('-,EN,1')
+        self._logger.debug('Stopped running scenario')
+
+    def set_power_offset(self, ant=1, power_offset=0):
+        """Set Power Offset of GSS7000 Tx
+        Args:
+            ant: antenna number of GSS7000
+            power_offset: transmit power offset level
+                Type, float.
+                Decimal, unit [dB]
+
+        Raises:
+            GSS7000Error: raise when power offset level is not in [-49, 15] range.
+        """
+        if not -49 <= power_offset <= 15:
+            errmsg = (f'"power_offset" must be within [-49, 15], '
+                      f'current input is {power_offset}')
+            raise GSS7000Error(error=errmsg, command='set_power_offset')
+
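+        # POW_LEV adjusts transmit power over the remote-command interface; the
+        # fields used here are vehicle/antenna, offset in dB and constellation,
+        # followed by a fixed block of flags (field meanings assumed).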
+        cmd = f'-,POW_LEV,V1_A{ant},{power_offset},GPS,0,0,1,1,1,1,0'
+        self._query(cmd)
+
+        infmsg = f'Set vehicle 1 antenna {ant} power offset: {power_offset}'
+        self._logger.debug(infmsg)
+
+    def set_ref_power(self, ref_dBm=-130):
+        """Set Ref Power of GSS7000 Tx
+        Args:
+            ref_dBm: transmit reference power level in dBm for GSS7000
+                Type, float.
+                Decimal, unit [dBm]
+
+        Raises:
+            GSS7000Error: raised when the reference power level is not in the
+                [-170, -115] range.
+        """
+        if not -170 <= ref_dBm <= -115:
+            errmsg = ('"power_offset" must be within [-170, -115], '
+                      'current input is {}').format(str(ref_dBm))
+            raise GSS7000Error(error=errmsg, command='set_ref_power')
+        cmd = 'REF_DBM,{}'.format(str(round(ref_dBm, 1)))
+        self._query(cmd)
+        infmsg = 'Set reference power level: {}'.format(str(round(ref_dBm, 1)))
+        self._logger.debug(infmsg)
+
+    def get_status(self, return_txt=False):
+        """Get current GSS7000 Status
+        Args:
+            return_txt: if True, return the status description string,
+                otherwise return the numeric status code.
+                Type, bool.
+        """
+        status_xml = self._query('NULL')
+        status = get_xml_text(status_xml, 'status')
+        if return_txt:
+            status_dict = {
+                '0': 'No Scenario loaded',
+                '1': 'Not completed loading a scenario',
+                '2': 'Idle, ready to run a scenario',
+                '3': 'Arming the scenario',
+                '4': 'Completed arming; or waiting for a command or '
+                     'trigger signal to start the scenario',
+                '5': 'Scenario running',
+                '6': 'Current scenario is paused.',
+                '7': 'Active scenario has stopped and has not been reset. '
+                     'Waiting for further commands.'
+            }
+            return status_dict.get(status)
+        else:
+            return int(status)
+
+    def set_power(self, power_level=-130):
+        """Set Power Level of GSS7000 Tx
+        Args:
+            power_level: transmit power level
+                Type, float.
+                Decimal, unit [dBm]
+
+        Raises:
+            GSS7000Error: raise when power level is not in [-170, -115] range.
+        """
+        if not -170 <= power_level <= -115:
+            errmsg = (f'"power_level" must be within [-170, -115], '
+                      f'current input is {power_level}')
+            raise GSS7000Error(error=errmsg, command='set_power')
+
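+        # -130 dBm is the GSS7000 default GPS L1 level (see power_lev_offset_cal),
+        # so the requested absolute level maps directly to an offset from it.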
+        power_offset = power_level + 130
+        self.set_power_offset(1, power_offset)
+        self.set_power_offset(2, power_offset)
+
+        infmsg = 'Set GSS7000 transmit power to "{}"'.format(
+            round(power_level, 1))
+        self._logger.debug(infmsg)
+
+    def power_lev_offset_cal(self, power_level=-130, sat='GPS', band='L1'):
+        """Convert target power level to power offset for GSS7000 power setting
+        Args:
+            power_level: transmit power level
+                Type, float.
+                Decimal, unit [dBm]
+                Default. -130
+            sat: satellite system to set the power level for
+                Type, str
+                Option 'GPS/GLONASS/GALILEO/BEIDOU/QZSS/IRNSS'
+            band: frequency band to set the power level for
+                Type, str
+                Option 'L1/L5/F1/B1I/B1C/B2A/E5'
+                Default, 'L1'.
+        Returns:
+            power_offset: The calculated power offset for setting the GSS7000
+                GNSS target power.
+        """
+        gss7000_tx_pwr = {
+            'GPS_L1': -130,
+            'GPS_L5': -127.9,
+            'GLONASS_F1': -131,
+            'GALILEO_L1': -127,
+            'GALILEO_E5': -122,
+            'BEIDOU_B1I': -133,
+            'BEIDOU_B1C': -130,
+            'BEIDOU_B2A': -127,
+            'QZSS_L1': -128.5,
+            'QZSS_L5': -124.9,
+            'IRNSS_L5': -130
+        }
+
+        sat_band = f'{sat}_{band}'
+        infmsg = f'Target satellite system and band: {sat_band}'
+        self._logger.debug(infmsg)
+        default_pwr_lev = gss7000_tx_pwr.get(sat_band, -130)
+        power_offset = power_level - default_pwr_lev
+        infmsg = (
+            f'Target power: {power_level}; Default power: {default_pwr_lev};'
+            f' Power offset: {power_offset}')
+        self._logger.debug(infmsg)
+
+        return power_offset
+
+    def sat_band_convert(self, sat, band):
+        """Satellite system and operation band conversion and check.
+        Args:
+            sat: satellite system
+                Type, str
+                Option 'GPS/GLO/GAL/BDS/IRNSS/ALL'
+            band: frequency band
+                Type, str
+                Option 'L1/L5/B1I/B1C/B2A/F1/E5'
+                Default, '', assumed to be L1.
+
+        Returns:
+            sat_band_tp: supported (satellite system, band) tuple,
+                e.g. ('GPS', 'L1').
+        """
+        sat_system_dict = {
+            'GPS': 'GPS',
+            'GLO': 'GLONASS',
+            'GAL': 'GALILEO',
+            'BDS': 'BEIDOU',
+            'IRNSS': 'IRNSS',
+            'ALL': 'GPS'
+        }
+        sat = sat_system_dict.get(sat, 'GPS')
+        if band == '':
+            infmsg = 'No band is set. Set to default band = L1'
+            self._logger.debug(infmsg)
+            band = 'L1'
+        if sat == '':
+            infmsg = 'No satellite system is set. Set to default sat = GPS'
+            self._logger.debug(infmsg)
+            sat = 'GPS'
+        sat_band = f'{sat}_{band}'
+        self._logger.debug(f'Current band: {sat_band}')
+        self._logger.debug(f'Capability: {self.capability}')
+        # Check if the satellite standard and band are supported;
+        # raise GSS7000Error if they are not in the licensed capability list
+        if sat_band not in self.capability:
+            errmsg = (
+                f'Satellite system and band ({sat_band}) are not supported. '
+                f'The GSS7000 support list: {self.capability}')
+            raise GSS7000Error(error=errmsg, command='set_scenario_power')
+        else:
+            sat_band_tp = tuple(sat_band.split('_'))
+
+        return sat_band_tp
+
+    def set_scenario_power(self,
+                           power_level=-130,
+                           sat_id='',
+                           sat_system='',
+                           freq_band='L1'):
+        """Set dynamic power for the running scenario.
+        Args:
+            power_level: transmit power level
+                Type, float.
+                Decimal, unit [dBm]
+                Default. -130
+            sat_id: set power level for specific satellite identifiers
+                Type, int.
+            sat_system: to set power level for all satellites in a system
+                Type, str
+                Option 'GPS/GLO/GAL/BDS'
+                Default, '', assumed to be GPS.
+            freq_band: frequency band to set the power level
+                Type, str
+                Option 'L1/L5/B1I/B1C/B2A/F1/E5/ALL'
+                Default, 'L1'.
+        Raises:
+            GSS7000Error: raised when the calculated power offset is not in the
+                [-49, 15] range.
+        """
+        band_dict = {
+            'L1': 1,
+            'L5': 2,
+            'B2A': 2,
+            'B1I': 1,
+            'B1C': 1,
+            'F1': 1,
+            'E5': 2,
+            'ALL': 3
+        }
+
+        # Convert and check satellite system and band
+        sat, band = self.sat_band_convert(sat_system, freq_band)
+        # Get freq band setting
+        band_cmd = band_dict.get(band, 1)
+
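+        # An empty sat_id targets the whole constellation (all_tx_type=1); a
+        # specific satellite id limits the change to that one satellite.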
+        if not sat_id:
+            sat_id = 0
+            all_tx_type = 1
+        else:
+            all_tx_type = 0
+
+        # Convert absolute power level to absolute power offset.
+        power_offset = self.power_lev_offset_cal(power_level, sat, band)
+
+        if not -49 <= power_offset <= 15:
+            errmsg = (f'"power_offset" must be within [-49, 15], '
+                      f'current input is {power_offset}')
+            raise GSS7000Error(error=errmsg, command='set_power_offset')
+
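+        # Antenna 1 (v1_a1) carries the L1-family bands and antenna 2 (v1_a2)
+        # the L5/E5-family, following band_dict above; 'ALL' drives both antennas.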
+        if band_cmd == 1:
+            cmd = f'-,POW_LEV,v1_a1,{power_offset},{sat},{sat_id},0,0,0,1,1,{all_tx_type}'
+            self._query(cmd)
+        elif band_cmd == 2:
+            cmd = f'-,POW_LEV,v1_a2,{power_offset},{sat},{sat_id},0,0,0,1,1,{all_tx_type}'
+            self._query(cmd)
+        elif band_cmd == 3:
+            cmd = f'-,POW_LEV,v1_a1,{power_offset},{sat},{sat_id},0,0,0,1,1,{all_tx_type}'
+            self._query(cmd)
+            cmd = f'-,POW_LEV,v1_a2,{power_offset},{sat},{sat_id},0,0,0,1,1,{all_tx_type}'
+            self._query(cmd)
diff --git a/acts/framework/tests/controllers/bits_lib/bits_client_test.py b/acts/framework/tests/controllers/bits_lib/bits_client_test.py
index c4261d6..dda819d 100644
--- a/acts/framework/tests/controllers/bits_lib/bits_client_test.py
+++ b/acts/framework/tests/controllers/bits_lib/bits_client_test.py
@@ -17,6 +17,7 @@
 from datetime import datetime
 import unittest
 
+from acts.libs.proc import job
 from acts.controllers.bits_lib import bits_client
 from acts.controllers.bits_lib import bits_service_config
 import mock
@@ -33,6 +34,18 @@
 NON_MONSOONED_CONFIG = bits_service_config.BitsServiceConfig(
     CONTROLLER_CONFIG_WITHOUT_MONSOON)
 
+KIBBLES_CONFIG = bits_service_config.BitsServiceConfig(
+    {
+        'Kibbles': [{
+            'board':     'board',
+            'connector': 'connector',
+            'serial':    'serial',
+        }],
+    },
+    kibble_bin='bin',
+    kibble_board_file='file.board',
+    virtual_metrics_file='file.vm')
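+# KIBBLES_CONFIG gives the client a kibble-based service with a virtual metrics
+# file, so the tests below can exercise the --vm_file code path.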
+
 
 class BitsClientTest(unittest.TestCase):
 
@@ -40,9 +53,25 @@
         super().setUp()
         self.mock_service = mock.Mock()
         self.mock_service.port = '42'
-        self.mock_active_collection = mock.Mock()
-        self.mock_active_collection.name = 'my_active_collection'
-        self.mock_active_collection.markers_buffer = []
+
+    @mock.patch('acts.libs.proc.job.run')
+    def test_execute_generic_command(self, mock_run):
+        mock_service = mock.Mock()
+        mock_service.port = '1337'
+        client = bits_client.BitsClient('bits.par', mock_service,
+                                        service_config=KIBBLES_CONFIG)
+
+        client.run_cmd('-i', '-am', '-not', '-a', '-teapot', timeout=12345)
+
+        expected_final_command = ['bits.par',
+                                  '--port',
+                                  '1337',
+                                  '-i',
+                                  '-am',
+                                  '-not',
+                                  '-a',
+                                  '-teapot']
+        mock_run.assert_called_with(expected_final_command, timeout=12345)
 
     @mock.patch('acts.libs.proc.job.run')
     def test_start_collection__without_monsoon__does_not_disconnect_monsoon(
@@ -51,7 +80,7 @@
         client = bits_client.BitsClient('bits.par', self.mock_service,
                                         service_config=NON_MONSOONED_CONFIG)
 
-        client.start_collection()
+        client.start_collection('collection')
 
         mock_run.assert_called()
         args_list = mock_run.call_args_list
@@ -61,55 +90,57 @@
         self.assertEqual(len(non_expected_call), 0,
                          'did not expect call with usb_disconnect')
 
-    @mock.patch('acts.context.get_current_context')
     @mock.patch('acts.libs.proc.job.run')
-    def test_stop_collection__usb_not_automanaged__does_not_connect_monsoon(
-        self,
-        mock_run,
-        mock_context):
-        output_path = mock.MagicMock(return_value='out')
-        mock_context.side_effect = lambda: output_path
+    def test_start_collection__frequency_arg_gets_populated(self, mock_run):
         client = bits_client.BitsClient('bits.par', self.mock_service,
                                         service_config=MONSOONED_CONFIG)
-        client._active_collection = self.mock_active_collection
 
-        client.stop_collection()
+        client.start_collection('collection', default_sampling_rate=12345)
+
+        mock_run.assert_called()
+        args_list = mock_run.call_args_list
+        expected_calls = list(
+            filter(lambda call: '--time' in call.args[0], args_list))
+        self.assertEqual(len(expected_calls), 1, 'expected 1 call with --time')
+        self.assertIn('--default_sampling_rate', expected_calls[0][0][0])
+        self.assertIn('12345', expected_calls[0][0][0])
+
+    @mock.patch('acts.libs.proc.job.run')
+    def test_start_collection__sampling_rate_defaults_to_1000(self, mock_run):
+        client = bits_client.BitsClient('bits.par', self.mock_service,
+                                        service_config=MONSOONED_CONFIG)
+
+        client.start_collection('collection')
+
+        mock_run.assert_called()
+        args_list = mock_run.call_args_list
+        expected_calls = list(
+            filter(lambda call: '--time' in call.args[0], args_list))
+        self.assertEqual(len(expected_calls), 1, 'expected 1 call with --time')
+        self.assertIn('--default_sampling_rate', expected_calls[0][0][0])
+        self.assertIn('1000', expected_calls[0][0][0])
+
+    @mock.patch('acts.libs.proc.job.run')
+    def test_stop_collection__usb_not_automanaged__does_not_connect_monsoon(
+        self, mock_run):
+        client = bits_client.BitsClient('bits.par', self.mock_service,
+                                        service_config=MONSOONED_CONFIG)
+
+        client.stop_collection('collection')
 
         mock_run.assert_called()
         args_list = mock_run.call_args_list
         non_expected_call = list(
             filter(lambda call: 'usb_connect' in call.args[0], args_list))
-        self.assertEquals(len(non_expected_call), 0,
-                          'did not expect call with usb_connect')
+        self.assertEqual(len(non_expected_call), 0,
+                         'did not expect call with usb_connect')
 
-    @mock.patch('acts.context.get_current_context')
     @mock.patch('acts.libs.proc.job.run')
-    def test_stop_collection__triggers_export(self, mock_run, mock_context):
-        output_path = mock.MagicMock(return_value='out')
-        mock_context.side_effect = lambda: output_path
+    def test_export_ignores_dataseries_gaps(self, mock_run):
         client = bits_client.BitsClient('bits.par', self.mock_service,
                                         service_config=MONSOONED_CONFIG)
-        client._active_collection = self.mock_active_collection
 
-        client.stop_collection()
-
-        mock_run.assert_called()
-        args_list = mock_run.call_args_list
-        expected_call = list(
-            filter(lambda call: '--export' in call.args[0], args_list))
-        self.assertEqual(len(expected_call), 1,
-                         'expected a call with --export')
-
-    @mock.patch('acts.context.get_current_context')
-    @mock.patch('acts.libs.proc.job.run')
-    def test__export_ignores_dataseries_gaps(self, mock_run, mock_context):
-        output_path = mock.MagicMock(return_value='out')
-        mock_context.side_effect = lambda: output_path
-        client = bits_client.BitsClient('bits.par', self.mock_service,
-                                        service_config=MONSOONED_CONFIG)
-        client._active_collection = self.mock_active_collection
-
-        client._export()
+        client.export('collection', '/path/a.7z.bits')
 
         mock_run.assert_called()
         args_list = mock_run.call_args_list
@@ -121,92 +152,107 @@
                          'expected a call with --ignore_gaps and --export')
         self.assertIn('--ignore_gaps', expected_call[0].args[0])
 
-    @mock.patch('acts.libs.proc.job.run')
-    def test_add_marker(self, _):
+    def test_export_path_must_end_in_bits_file_extension(self):
         client = bits_client.BitsClient('bits.par', self.mock_service,
                                         service_config=MONSOONED_CONFIG)
-        client._active_collection = self.mock_active_collection
 
-        client.add_marker(7133, 'my marker')
+        self.assertRaisesRegex(
+            bits_client.BitsClientError,
+            r'collections can only be exported to files ending in .7z.bits',
+            client.export, 'collection', '/path/')
 
-        client._active_collection.add_marker.assert_called_with(7133,
-                                                                'my marker')
-
-    @mock.patch('acts.context.get_current_context')
     @mock.patch('acts.libs.proc.job.run')
-    def test_stop_collection__flushes_buffered_markers(self, mock_run,
-                                                       mock_context):
-        output_path = mock.MagicMock(return_value='out')
-        mock_context.side_effect = lambda: output_path
+    def test_export_as_csv(self, mock_run):
         client = bits_client.BitsClient('bits.par', self.mock_service,
                                         service_config=MONSOONED_CONFIG)
-        self.mock_active_collection.markers_buffer.append((3, 'tres'))
-        self.mock_active_collection.markers_buffer.append((1, 'uno'))
-        self.mock_active_collection.markers_buffer.append((2, 'dos'))
-        client._active_collection = self.mock_active_collection
+        output_file = '/path/to/csv'
+        collection = 'collection'
 
-        client.stop_collection()
+        client.export_as_csv([':mW', ':mV'], collection, output_file)
+
+        mock_run.assert_called()
+        cmd = mock_run.call_args_list[0].args[0]
+        self.assertIn(collection, cmd)
+        self.assertIn(output_file, cmd)
+        self.assertIn(':mW,:mV', cmd)
+        self.assertNotIn('--vm_file', cmd)
+        self.assertNotIn('default', cmd)
+
+    @mock.patch('acts.libs.proc.job.run')
+    def test_export_as_csv_with_virtual_metrics_file(self, mock_run):
+        output_file = '/path/to/csv'
+        collection = 'collection'
+        client = bits_client.BitsClient('bits.par', self.mock_service,
+                                        service_config=KIBBLES_CONFIG)
+
+        client.export_as_csv([':mW', ':mV'], collection, output_file)
+
+        mock_run.assert_called()
+        cmd = mock_run.call_args_list[0].args[0]
+        self.assertIn(collection, cmd)
+        self.assertIn(':mW,:mV', cmd)
+        self.assertIn('--vm_file', cmd)
+        self.assertIn('default', cmd)
+
+    @mock.patch('acts.libs.proc.job.run')
+    def test_add_markers(self, mock_run):
+        client = bits_client.BitsClient('bits.par', self.mock_service,
+                                        service_config=MONSOONED_CONFIG)
+
+        client.add_markers('collection', [(1, 'ein'),
+                                          (2, 'zwei'),
+                                          (3, 'drei')])
 
         mock_run.assert_called()
         args_list = mock_run.call_args_list
         expected_calls = list(
             filter(lambda call: '--log' in call.args[0], args_list))
-        self.assertEqual(len(expected_calls), 3,
-                         'expected 3 calls with --log')
+        self.assertEqual(len(expected_calls), 3, 'expected 3 calls with --log')
         self.assertIn('--log_ts', expected_calls[0][0][0])
         self.assertIn('1', expected_calls[0][0][0])
-        self.assertIn('uno', expected_calls[0][0][0])
+        self.assertIn('ein', expected_calls[0][0][0])
+
         self.assertIn('--log_ts', expected_calls[1][0][0])
         self.assertIn('2', expected_calls[1][0][0])
-        self.assertIn('dos', expected_calls[1][0][0])
+        self.assertIn('zwei', expected_calls[1][0][0])
+
         self.assertIn('--log_ts', expected_calls[2][0][0])
         self.assertIn('3', expected_calls[2][0][0])
-        self.assertIn('tres', expected_calls[2][0][0])
-        self.mock_active_collection.clear_markers_buffer.assert_called()
+        self.assertIn('drei', expected_calls[2][0][0])
 
-    @mock.patch('acts.context.get_current_context')
     @mock.patch('acts.libs.proc.job.run')
-    def test_stop_collection__flushes_buffered_datetime_markers(self,
-                                                                mock_run,
-                                                                mock_context):
-        output_path = mock.MagicMock(return_value='out')
-        mock_context.side_effect = lambda: output_path
+    def test_add_markers_with_datetimes(self, mock_run):
         client = bits_client.BitsClient('bits.par', self.mock_service,
                                         service_config=MONSOONED_CONFIG)
-        self.mock_active_collection.markers_buffer.append(
-            (datetime.utcfromtimestamp(3), 'tres'))
-        self.mock_active_collection.markers_buffer.append(
-            (datetime.utcfromtimestamp(1), 'uno'))
-        self.mock_active_collection.markers_buffer.append(
-            (datetime.utcfromtimestamp(2), 'dos'))
-        client._active_collection = self.mock_active_collection
 
-        client.stop_collection()
+        client.add_markers('collection',
+                           [(datetime.utcfromtimestamp(1), 'ein'),
+                            (2e9, 'zwei'),
+                            (datetime.utcfromtimestamp(3), 'drei')])
 
         mock_run.assert_called()
         args_list = mock_run.call_args_list
         expected_calls = list(
             filter(lambda call: '--log' in call.args[0], args_list))
-        self.assertEqual(len(expected_calls), 3,
-                         'expected 3 calls with --log')
+        self.assertEqual(len(expected_calls), 3, 'expected 3 calls with --log')
         self.assertIn('--log_ts', expected_calls[0][0][0])
         self.assertIn(str(int(1e9)), expected_calls[0][0][0])
-        self.assertIn('uno', expected_calls[0][0][0])
+        self.assertIn('ein', expected_calls[0][0][0])
+
         self.assertIn('--log_ts', expected_calls[1][0][0])
         self.assertIn(str(int(2e9)), expected_calls[1][0][0])
-        self.assertIn('dos', expected_calls[1][0][0])
+        self.assertIn('zwei', expected_calls[1][0][0])
+
         self.assertIn('--log_ts', expected_calls[2][0][0])
         self.assertIn(str(int(3e9)), expected_calls[2][0][0])
-        self.assertIn('tres', expected_calls[2][0][0])
-        self.mock_active_collection.clear_markers_buffer.assert_called()
+        self.assertIn('drei', expected_calls[2][0][0])
 
     @mock.patch('acts.libs.proc.job.run')
     def test_get_metrics(self, mock_run):
         client = bits_client.BitsClient('bits.par', self.mock_service,
                                         service_config=MONSOONED_CONFIG)
-        client._active_collection = self.mock_active_collection
 
-        client.get_metrics(8888, 9999)
+        client.get_metrics('collection', 8888, 9999)
 
         mock_run.assert_called()
         args_list = mock_run.call_args_list
@@ -224,9 +270,9 @@
     def test_get_metrics_with_datetime_markers(self, mock_run):
         client = bits_client.BitsClient('bits.par', self.mock_service,
                                         service_config=MONSOONED_CONFIG)
-        client._active_collection = self.mock_active_collection
 
-        client.get_metrics(datetime.utcfromtimestamp(1),
+        client.get_metrics('collection',
+                           datetime.utcfromtimestamp(1),
                            datetime.utcfromtimestamp(2))
 
         mock_run.assert_called()
@@ -247,7 +293,6 @@
         service_config.has_virtual_metrics_file = True
         client = bits_client.BitsClient('bits.par', self.mock_service,
                                         service_config=service_config)
-        client._active_collection = self.mock_active_collection
 
         client.get_metrics(8888, 9999)
 
@@ -261,6 +306,21 @@
         self.assertIn('--vm_file', expected_call[0][0][0])
         self.assertIn('default', expected_call[0][0][0])
 
+    @mock.patch('acts.libs.proc.job.run',
+                return_value=job.Result(stdout=bytes('device', 'utf-8')))
+    def test_list_devices(self, mock_run):
+        service_config = mock.Mock()
+        client = bits_client.BitsClient('bits.par', self.mock_service,
+                                        service_config=service_config)
+
+        result = client.list_devices()
+
+        mock_run.assert_called()
+        cmd = mock_run.call_args_list[0].args[0]
+        self.assertIn('--list', cmd)
+        self.assertIn('devices', cmd)
+        self.assertEqual(result, 'device')
+
 
 if __name__ == '__main__':
     unittest.main()
diff --git a/acts/framework/tests/controllers/bits_test.py b/acts/framework/tests/controllers/bits_test.py
index e437bea..a7ea45c 100644
--- a/acts/framework/tests/controllers/bits_test.py
+++ b/acts/framework/tests/controllers/bits_test.py
@@ -3,6 +3,7 @@
 #   Copyright 2020 - The Android Open Source Project
 #
 #   Licensed under the Apache License, Version 2.0 (the 'License');
+
 #   you may not use this file except in compliance with the License.
 #   You may obtain a copy of the License at
 #
diff --git a/acts/framework/tests/controllers/iperf_server_test.py b/acts/framework/tests/controllers/iperf_server_test.py
index b5d91a5..00f7da3 100644
--- a/acts/framework/tests/controllers/iperf_server_test.py
+++ b/acts/framework/tests/controllers/iperf_server_test.py
@@ -35,20 +35,15 @@
 
 class IPerfServerModuleTest(unittest.TestCase):
     """Tests the acts.controllers.iperf_server module."""
-
     def test_create_creates_local_iperf_server_with_int(self):
         self.assertIsInstance(
-            iperf_server.create([12345])[0],
-            IPerfServer,
-            'create() failed to create IPerfServer for integer input.'
-        )
+            iperf_server.create([12345])[0], IPerfServer,
+            'create() failed to create IPerfServer for integer input.')
 
     def test_create_creates_local_iperf_server_with_str(self):
         self.assertIsInstance(
-            iperf_server.create(['12345'])[0],
-            IPerfServer,
-            'create() failed to create IPerfServer for integer input.'
-        )
+            iperf_server.create(['12345'])[0], IPerfServer,
+            'create() failed to create IPerfServer for integer input.')
 
     def test_create_cannot_create_local_iperf_server_with_bad_str(self):
         with self.assertRaises(ValueError):
@@ -57,39 +52,47 @@
     @mock.patch('acts.controllers.iperf_server.utils')
     def test_create_creates_server_over_ssh_with_ssh_config_and_port(self, _):
         self.assertIsInstance(
-            iperf_server.create([{'ssh_config': {'user': '', 'host': ''},
-                                  'port': ''}])[0],
-            IPerfServerOverSsh,
-            'create() failed to create IPerfServerOverSsh for a valid config.'
-        )
+            iperf_server.create([{
+                'ssh_config': {
+                    'user': '',
+                    'host': ''
+                },
+                'port': ''
+            }])[0], IPerfServerOverSsh,
+            'create() failed to create IPerfServerOverSsh for a valid config.')
 
     def test_create_creates_server_over_adb_with_proper_config(self):
         self.assertIsInstance(
-            iperf_server.create([{'AndroidDevice': '53R147', 'port': 0}])[0],
-            IPerfServerOverAdb,
-            'create() failed to create IPerfServerOverAdb for a valid config.'
-        )
+            iperf_server.create([{
+                'AndroidDevice': '53R147',
+                'port': 0
+            }])[0], IPerfServerOverAdb,
+            'create() failed to create IPerfServerOverAdb for a valid config.')
 
     def test_create_raises_value_error_on_bad_config_dict(self):
         with self.assertRaises(ValueError):
-            iperf_server.create([{'AndroidDevice': '53R147', 'ssh_config': {}}])
+            iperf_server.create([{
+                'AndroidDevice': '53R147',
+                'ssh_config': {}
+            }])
 
     def test_get_port_from_ss_output_returns_correct_port_ipv4(self):
         ss_output = ('tcp LISTEN  0 5 127.0.0.1:<PORT>  *:*'
                      ' users:(("cmd",pid=<PID>,fd=3))')
         self.assertEqual(
-            iperf_server._get_port_from_ss_output(ss_output, '<PID>'), '<PORT>')
+            iperf_server._get_port_from_ss_output(ss_output, '<PID>'),
+            '<PORT>')
 
     def test_get_port_from_ss_output_returns_correct_port_ipv6(self):
         ss_output = ('tcp LISTEN  0 5 ff:ff:ff:ff:ff:ff:<PORT>  *:*'
                      ' users:(("cmd",pid=<PID>,fd=3))')
         self.assertEqual(
-            iperf_server._get_port_from_ss_output(ss_output, '<PID>'), '<PORT>')
+            iperf_server._get_port_from_ss_output(ss_output, '<PID>'),
+            '<PORT>')
 
 
 class IPerfServerBaseTest(unittest.TestCase):
     """Tests acts.controllers.iperf_server.IPerfServerBase."""
-
     @mock.patch('os.makedirs')
     def test_get_full_file_path_creates_parent_directory(self, mock_makedirs):
         # Will never actually be created/used.
@@ -99,15 +102,11 @@
 
         full_file_path = server._get_full_file_path()
 
-        self.assertTrue(
-            mock_makedirs.called,
-            'Did not attempt to create a directory.'
-        )
+        self.assertTrue(mock_makedirs.called,
+                        'Did not attempt to create a directory.')
         self.assertEqual(
-            os.path.dirname(full_file_path),
-            mock_makedirs.call_args[ARGS][0],
-            'The parent directory of the full file path was not created.'
-        )
+            os.path.dirname(full_file_path), mock_makedirs.call_args[ARGS][0],
+            'The parent directory of the full file path was not created.')
 
 
 class IPerfServerTest(unittest.TestCase):
@@ -152,10 +151,8 @@
         server.start()
 
         self.assertEqual(
-            server._current_log_file,
-            MOCK_LOGFILE_PATH,
-            'The _current_log_file was not received from _get_full_file_path.'
-        )
+            server._current_log_file, MOCK_LOGFILE_PATH,
+            'The _current_log_file was not received from _get_full_file_path.')
 
     @mock.patch('builtins.open')
     @mock.patch('acts.controllers.iperf_server.subprocess')
@@ -167,16 +164,14 @@
 
         log_file = server.stop()
 
-        self.assertEqual(
-            log_file,
-            MOCK_LOGFILE_PATH,
-            'The _current_log_file was not returned by stop().'
-        )
+        self.assertEqual(log_file, MOCK_LOGFILE_PATH,
+                         'The _current_log_file was not returned by stop().')
 
     @mock.patch('builtins.open')
     @mock.patch('acts.controllers.iperf_server.subprocess')
     @mock.patch('acts.controllers.iperf_server.job')
-    def test_start_does_not_run_two_concurrent_processes(self, start_proc, _, __):
+    def test_start_does_not_run_two_concurrent_processes(
+            self, start_proc, _, __):
         server = IPerfServer('port')
         server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
         server._iperf_process = mock.Mock()
@@ -185,8 +180,7 @@
 
         self.assertFalse(
             start_proc.called,
-            'start() should not begin a second process if another is running.'
-        )
+            'start() should not begin a second process if another is running.')
 
     @mock.patch('acts.utils.stop_standing_subprocess')
     def test_stop_exits_early_if_no_process_has_started(self, stop_proc):
@@ -198,8 +192,7 @@
 
         self.assertFalse(
             stop_proc.called,
-            'stop() should not kill a process if no process is running.'
-        )
+            'stop() should not kill a process if no process is running.')
 
 
 class IPerfServerOverSshTest(unittest.TestCase):
@@ -212,6 +205,7 @@
         """Tests calling start() without calling stop() makes started True."""
         server = IPerfServerOverSsh(*self.INIT_ARGS)
         server._ssh_session = mock.Mock()
+        server._cleanup_iperf_port = mock.Mock()
         server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
 
         server.start()
@@ -224,6 +218,7 @@
         """Tests calling start() without calling stop() makes started True."""
         server = IPerfServerOverSsh(*self.INIT_ARGS)
         server._ssh_session = mock.Mock()
+        server._cleanup_iperf_port = mock.Mock()
         server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
 
         server.start()
@@ -236,21 +231,20 @@
     def test_stop_returns_expected_log_file(self, _, __):
         server = IPerfServerOverSsh(*self.INIT_ARGS)
         server._ssh_session = mock.Mock()
+        server._cleanup_iperf_port = mock.Mock()
         server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
         server._iperf_pid = mock.Mock()
 
         log_file = server.stop()
 
-        self.assertEqual(
-            log_file,
-            MOCK_LOGFILE_PATH,
-            'The expected log file was not returned by stop().'
-        )
+        self.assertEqual(log_file, MOCK_LOGFILE_PATH,
+                         'The expected log file was not returned by stop().')
 
     @mock.patch('acts.controllers.iperf_server.connection')
     def test_start_does_not_run_two_concurrent_processes(self, _):
         server = IPerfServerOverSsh(*self.INIT_ARGS)
         server._ssh_session = mock.Mock()
+        server._cleanup_iperf_port = mock.Mock()
         server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
         server._iperf_pid = mock.Mock()
 
@@ -258,14 +252,14 @@
 
         self.assertFalse(
             server._ssh_session.run_async.called,
-            'start() should not begin a second process if another is running.'
-        )
+            'start() should not begin a second process if another is running.')
 
     @mock.patch('acts.utils.stop_standing_subprocess')
     @mock.patch('acts.controllers.iperf_server.connection')
     def test_stop_exits_early_if_no_process_has_started(self, _, __):
         server = IPerfServerOverSsh(*self.INIT_ARGS)
         server._ssh_session = mock.Mock()
+        server._cleanup_iperf_port = mock.Mock()
         server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
         server._iperf_pid = None
 
@@ -273,8 +267,7 @@
 
         self.assertFalse(
             server._ssh_session.run_async.called,
-            'stop() should not kill a process if no process is running.'
-        )
+            'stop() should not kill a process if no process is running.')
 
 
 class IPerfServerOverAdbTest(unittest.TestCase):
@@ -320,11 +313,8 @@
 
         log_file = server.stop()
 
-        self.assertEqual(
-            log_file,
-            MOCK_LOGFILE_PATH,
-            'The expected log file was not returned by stop().'
-        )
+        self.assertEqual(log_file, MOCK_LOGFILE_PATH,
+                         'The expected log file was not returned by stop().')
 
     @mock.patch(ANDROID_DEVICE_PROP)
     def test_start_does_not_run_two_concurrent_processes(self, android_device):
@@ -336,14 +326,13 @@
 
         self.assertFalse(
             android_device.adb.shell_nb.called,
-            'start() should not begin a second process if another is running.'
-        )
+            'start() should not begin a second process if another is running.')
 
     @mock.patch('acts.libs.proc.job.run')
     @mock.patch('builtins.open')
     @mock.patch(ANDROID_DEVICE_PROP)
-    def test_stop_exits_early_if_no_process_has_started(self, android_device, _,
-                                                        __):
+    def test_stop_exits_early_if_no_process_has_started(
+            self, android_device, _, __):
         server = IPerfServerOverAdb('53R147', 'PORT')
         server._get_full_file_path = lambda _: MOCK_LOGFILE_PATH
         server._iperf_pid = None
@@ -352,8 +341,7 @@
 
         self.assertFalse(
             android_device.adb.shell_nb.called,
-            'stop() should not kill a process if no process is running.'
-        )
+            'stop() should not kill a process if no process is running.')
 
 
 if __name__ == '__main__':
diff --git a/acts/framework/tests/controllers/power_metrics_test.py b/acts/framework/tests/controllers/power_metrics_test.py
index 1533998..a14ca8d 100644
--- a/acts/framework/tests/controllers/power_metrics_test.py
+++ b/acts/framework/tests/controllers/power_metrics_test.py
@@ -24,9 +24,9 @@
 from acts.controllers.power_metrics import CURRENT
 from acts.controllers.power_metrics import END_TIMESTAMP
 from acts.controllers.power_metrics import HOUR
+from acts.controllers.power_metrics import Metric
 from acts.controllers.power_metrics import MILLIAMP
 from acts.controllers.power_metrics import MINUTE
-from acts.controllers.power_metrics import Metric
 from acts.controllers.power_metrics import PowerMetrics
 from acts.controllers.power_metrics import START_TIMESTAMP
 from acts.controllers.power_metrics import TIME
@@ -137,20 +137,34 @@
         """Test that given test timestamps, a power metric is generated from
         a subset of samples corresponding to the test."""
         timestamps = {'sample_test': {START_TIMESTAMP: 3500,
-                                      END_TIMESTAMP: 8500}}
+                                      END_TIMESTAMP:   8500}}
 
         mock_power_metric = mock.Mock()
         mock_power_metric_type.side_effect = lambda v: mock_power_metric
-        metrics = power_metrics.generate_test_metrics(self.RAW_DATA,
-                                                      timestamps=timestamps,
-                                                      voltage=self.VOLTAGE)
+        power_metrics.generate_test_metrics(self.RAW_DATA,
+                                            timestamps=timestamps,
+                                            voltage=self.VOLTAGE)
 
         self.assertEqual(mock_power_metric.update_metrics.call_count, 5)
 
+    def test_incomplete_timestamps_are_ignored(self):
+        """Test that given incomplete timestamps, a power metric is generated from
+        a subset of samples corresponding to the test."""
+        sample_test = 'sample_test'
+        test_end = 13500
+        test_timestamps = {sample_test: {
+            END_TIMESTAMP: test_end}}
+        # no error expected
+        power_metrics.generate_test_metrics(self.RAW_DATA,
+                                            timestamps=test_timestamps,
+                                            voltage=self.VOLTAGE)
+
     def test_numeric_metrics(self):
         """Test that the numeric metrics have correct values."""
         timestamps = {'sample_test': {START_TIMESTAMP: 0,
-                                      END_TIMESTAMP: 10000}}
+                                      END_TIMESTAMP:   10000}}
         metrics = power_metrics.generate_test_metrics(self.RAW_DATA,
                                                       timestamps=timestamps,
                                                       voltage=self.VOLTAGE)
diff --git a/acts_tests/acts_contrib/test_utils/bt/A2dpBaseTest.py b/acts_tests/acts_contrib/test_utils/bt/A2dpBaseTest.py
index e943083..36acdfd 100644
--- a/acts_tests/acts_contrib/test_utils/bt/A2dpBaseTest.py
+++ b/acts_tests/acts_contrib/test_utils/bt/A2dpBaseTest.py
@@ -16,14 +16,21 @@
 """Stream music through connected device from phone test implementation."""
 import acts
 import os
+import pandas as pd
 import shutil
 import time
 
 import acts_contrib.test_utils.coex.audio_test_utils as atu
 import acts_contrib.test_utils.bt.bt_test_utils as btutils
 from acts import asserts
+from acts_contrib.test_utils.bt import bt_constants
+from acts_contrib.test_utils.bt import BtEnum
 from acts_contrib.test_utils.abstract_devices.bluetooth_handsfree_abstract_device import BluetoothHandsfreeAbstractDeviceFactory as bt_factory
 from acts_contrib.test_utils.bt.BluetoothBaseTest import BluetoothBaseTest
+from acts_contrib.test_utils.bt.ble_performance_test_utils import plot_graph
+from acts_contrib.test_utils.power.PowerBTBaseTest import ramp_attenuation
+from acts_contrib.test_utils.bt.loggers import bluetooth_metric_logger as log
+from acts.signals import TestPass, TestError
 
 PHONE_MUSIC_FILE_DIRECTORY = '/sdcard/Music'
 INIT_ATTEN = 0
@@ -44,12 +51,15 @@
     def setup_class(self):
 
         super().setup_class()
+        self.bt_logger = log.BluetoothMetricLogger.for_test_case()
         self.dut = self.android_devices[0]
-        req_params = ['audio_params', 'music_files']
+        req_params = ['audio_params', 'music_files', 'system_path_loss']
+        opt_params = ['bugreport']
         #'audio_params' is a dict, contains the audio device type, audio streaming
         #settings such as volumn, duration, audio recording parameters such as
         #channel, sampling rate/width, and thdn parameters for audio processing
         self.unpack_userparams(req_params)
+        self.unpack_userparams(opt_params, bugreport=None)
         # Find music file and push it to the dut
         music_src = self.music_files[0]
         music_dest = PHONE_MUSIC_FILE_DIRECTORY
@@ -113,6 +123,11 @@
         self.bt_device.power_off()
         btutils.disable_bluetooth(self.dut.droid)
 
+    def on_pass(self, test_name, begin_time):
+
+        if hasattr(self, 'bugreport') and self.bugreport == 1:
+            self._take_bug_report(test_name, begin_time)
+
     def play_and_record_audio(self, duration):
         """Play and record audio for a set duration.
 
@@ -135,27 +150,45 @@
         asserts.assert_true(audio_captured, 'Audio not recorded')
         return audio_captured
 
-    def _get_bt_link_metrics(self):
+    def _get_bt_link_metrics(self, tag=''):
         """Get bt link metrics such as rssi and tx pwls.
 
         Returns:
-            rssi_master: master rssi
-            pwl_master: master tx pwl
-            rssi_slave: slave rssi
+            master_metrics_list: list of metrics of the central device
+            slave_metrics_list: list of metrics of the peripheral device
         """
 
+        self.raw_bt_metrics_path = os.path.join(self.log_path,
+                                                'BT_Raw_Metrics')
         self.media.play()
         # Get master rssi and power level
-        rssi_master = btutils.get_bt_metric(self.dut)['rssi']
-        pwl_master = btutils.get_bt_metric(self.dut)['pwlv']
-        # Get slave rssi if possible
+        process_data_dict = btutils.get_bt_metric(
+            self.dut, tag=tag, log_path=self.raw_bt_metrics_path)
+        rssi_master = process_data_dict.get('rssi')
+        pwl_master = process_data_dict.get('pwlv')
+        rssi_c0_master = process_data_dict.get('rssi_c0')
+        rssi_c1_master = process_data_dict.get('rssi_c1')
+        txpw_c0_master = process_data_dict.get('txpw_c0')
+        txpw_c1_master = process_data_dict.get('txpw_c1')
+        bftx_master = process_data_dict.get('bftx')
+        divtx_master = process_data_dict.get('divtx')
+
         if isinstance(self.bt_device_controller,
                       acts.controllers.android_device.AndroidDevice):
-            rssi_slave = btutils.get_bt_rssi(self.bt_device_controller)
+            rssi_slave = btutils.get_bt_rssi(self.bt_device_controller,
+                                             tag=tag,
+                                             log_path=self.raw_bt_metrics_path)
         else:
             rssi_slave = None
         self.media.stop()
-        return [rssi_master, pwl_master, rssi_slave]
+
+        master_metrics_list = [
+            rssi_master, pwl_master, rssi_c0_master, rssi_c1_master,
+            txpw_c0_master, txpw_c1_master, bftx_master, divtx_master
+        ]
+        slave_metrics_list = [rssi_slave]
+
+        return master_metrics_list, slave_metrics_list
 
     def run_thdn_analysis(self, audio_captured, tag):
         """Calculate Total Harmonic Distortion plus Noise for latest recording.
@@ -205,3 +238,208 @@
         else:
             self.log.info('%i anomalies detected.' % num_anom)
         return anom
+
+    def generate_proto(self, data_points, codec_type, sample_rate,
+                       bits_per_sample, channel_mode):
+        """Generate a results protobuf.
+
+        Args:
+            data_points: list of dicts representing info to go into
+              AudioTestDataPoint protobuffer message.
+            codec_type: The codec type config to store in the proto.
+            sample_rate: The sample rate config to store in the proto.
+            bits_per_sample: The bits per sample config to store in the proto.
+            channel_mode: The channel mode config to store in the proto.
+        Returns:
+             dict: Dictionary with key 'proto' mapping to serialized protobuf,
+               'proto_ascii' mapping to human readable protobuf info, and 'test'
+               mapping to the test class name that generated the results.
+        """
+
+        # Populate protobuf
+        test_case_proto = self.bt_logger.proto_module.BluetoothAudioTestResult(
+        )
+
+        for data_point in data_points:
+            audio_data_proto = test_case_proto.data_points.add()
+            log.recursive_assign(audio_data_proto, data_point)
+
+        codec_proto = test_case_proto.a2dp_codec_config
+        codec_proto.codec_type = bt_constants.codec_types[codec_type]
+        codec_proto.sample_rate = int(sample_rate)
+        codec_proto.bits_per_sample = int(bits_per_sample)
+        codec_proto.channel_mode = bt_constants.channel_modes[channel_mode]
+
+        self.bt_logger.add_config_data_to_proto(test_case_proto, self.dut,
+                                                self.bt_device)
+
+        self.bt_logger.add_proto_to_results(test_case_proto,
+                                            self.__class__.__name__)
+
+        proto_dict = self.bt_logger.get_proto_dict(self.__class__.__name__,
+                                                   test_case_proto)
+        del proto_dict["proto_ascii"]
+        return proto_dict
+
+    def set_test_atten(self, atten):
+        """Set the attenuation(s) for current test condition.
+
+        Args:
+            atten: attenuation level to set, in dB.
+        """
+        if hasattr(self, 'dual_chain') and self.dual_chain == 1:
+            ramp_attenuation(self.atten_c0,
+                             atten,
+                             attenuation_step_max=2,
+                             time_wait_in_between=1)
+            self.log.info('Set Chain 0 attenuation to %d dB', atten)
+            ramp_attenuation(self.atten_c1,
+                             atten + self.gain_mismatch,
+                             attenuation_step_max=2,
+                             time_wait_in_between=1)
+            self.log.info('Set Chain 1 attenuation to %d dB',
+                          atten + self.gain_mismatch)
+        else:
+            ramp_attenuation(self.attenuator, atten)
+            self.log.info('Set attenuation to %d dB', atten)
+
+    def run_a2dp_to_max_range(self, codec_config):
+        attenuation_range = range(self.attenuation_vector['start'],
+                                  self.attenuation_vector['stop'] + 1,
+                                  self.attenuation_vector['step'])
+
+        data_points = []
+        self.file_output = os.path.join(
+            self.log_path, '{}.csv'.format(self.current_test_name))
+
+        # Set Codec if needed
+        current_codec = self.dut.droid.bluetoothA2dpGetCurrentCodecConfig()
+        current_codec_type = BtEnum.BluetoothA2dpCodecType(
+            current_codec['codecType']).name
+        if current_codec_type != codec_config['codec_type']:
+            codec_set = btutils.set_bluetooth_codec(self.dut, **codec_config)
+            asserts.assert_true(codec_set, 'Codec configuration failed.')
+        else:
+            self.log.info('Current codec is {}, no need to change'.format(
+                current_codec_type))
+
+        # Loop over attenuation levels with the same codec setting
+        for atten in attenuation_range:
+            self.media.play()
+            self.set_test_atten(atten)
+
+            tag = 'codec_{}_attenuation_{}dB_'.format(
+                codec_config['codec_type'], atten)
+            recorded_file = self.play_and_record_audio(
+                self.audio_params['duration'])
+            thdns = self.run_thdn_analysis(recorded_file, tag)
+
+            # Collect Metrics for dashboard
+            [
+                rssi_master, pwl_master, rssi_c0_master, rssi_c1_master,
+                txpw_c0_master, txpw_c1_master, bftx_master, divtx_master
+            ], [rssi_slave] = self._get_bt_link_metrics(tag)
+
+            data_point = {
+                'attenuation_db':
+                int(self.attenuator.get_atten()),
+                'pathloss':
+                atten + self.system_path_loss,
+                'rssi_primary':
+                rssi_master.get(self.dut.serial, -127),
+                'tx_power_level_master':
+                pwl_master.get(self.dut.serial, -127),
+                'rssi_secondary':
+                rssi_slave.get(self.bt_device_controller.serial, -127),
+                'rssi_c0_dut':
+                rssi_c0_master.get(self.dut.serial, -127),
+                'rssi_c1_dut':
+                rssi_c1_master.get(self.dut.serial, -127),
+                'txpw_c0_dut':
+                txpw_c0_master.get(self.dut.serial, -127),
+                'txpw_c1_dut':
+                txpw_c1_master.get(self.dut.serial, -127),
+                'bftx_state':
+                bftx_master.get(self.dut.serial, -127),
+                'divtx_state':
+                divtx_master.get(self.dut.serial, -127),
+                'total_harmonic_distortion_plus_noise_percent':
+                thdns[0] * 100
+            }
+            self.log.info(data_point)
+            # bokeh data for generating BokehFigure
+            bokeh_data = {
+                'x_label': 'Pathloss (dBm)',
+                'primary_y_label': 'RSSI (dBm)',
+                'log_path': self.log_path,
+                'current_test_name': self.current_test_name
+            }
+            # plot_data for adding lines to the existing BokehFigure
+            plot_data = {
+                'line_one': {
+                    'x_label': 'Pathloss (dBm)',
+                    'primary_y_label': 'RSSI (dBm)',
+                    'x_column': 'pathloss',
+                    'y_column': 'rssi_primary',
+                    'legend': 'DUT RSSI (dBm)',
+                    'marker': 'circle_x',
+                    'y_axis': 'default'
+                },
+                'line_two': {
+                    'x_column': 'pathloss',
+                    'y_column': 'rssi_secondary',
+                    'legend': 'Remote device RSSI (dBm)',
+                    'marker': 'hex',
+                    'y_axis': 'default'
+                },
+                'line_three': {
+                    'x_column': 'pathloss',
+                    'y_column': 'tx_power_level_master',
+                    'legend': 'DUT TX Power (dBm)',
+                    'marker': 'hex',
+                    'y_axis': 'secondary'
+                }
+            }
+
+            # Check thdn for glitches, stop if max range reached
+            if thdns[0] == 0:
+                proto_dict = self.generate_proto(data_points, **codec_config)
+                A2dpRange_df = pd.DataFrame(data_points)
+                A2dpRange_df.to_csv(self.file_output, index=False)
+                plot_graph(A2dpRange_df,
+                           plot_data,
+                           bokeh_data,
+                           secondary_y_label='DUT TX Power')
+                raise TestError(
+                    'Music play/recording is not working properly or the '
+                    'connection was lost')
+
+            data_points.append(data_point)
+            A2dpRange_df = pd.DataFrame(data_points)
+
+            for thdn in thdns:
+                if thdn >= self.audio_params['thdn_threshold']:
+                    self.log.info(
+                        'Max range at attenuation {} dB'.format(atten))
+                    self.log.info('DUT rssi {} dBm, DUT tx power level {}, '
+                                  'Remote rssi {} dBm'.format(
+                                      rssi_master, pwl_master, rssi_slave))
+                    proto_dict = self.generate_proto(data_points,
+                                                     **codec_config)
+                    A2dpRange_df.to_csv(self.file_output, index=False)
+                    plot_graph(A2dpRange_df,
+                               plot_data,
+                               bokeh_data,
+                               secondary_y_label='DUT TX Power')
+                    raise TestPass('Max range reached, moving to next codec',
+                                   extras=proto_dict)
+        # Save Data points to csv
+        A2dpRange_df.to_csv(self.file_output, index=False)
+        # Plot graph
+        plot_graph(A2dpRange_df,
+                   plot_data,
+                   bokeh_data,
+                   secondary_y_label='DUT TX Power')
+        proto_dict = self.generate_proto(data_points, **codec_config)
+        raise TestPass('Could not reach max range; more attenuation is needed.',
+                       extras=proto_dict)
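
For context, a hypothetical test case built on this helper would simply pass in a codec configuration and let the TestPass/TestError signals raised above terminate the sweep (the test method and codec values below are illustrative, not part of this change):

    def test_a2dp_range_sbc(self):
        codec_config = {
            'codec_type': 'SBC',
            'sample_rate': 44100,
            'bits_per_sample': 16,
            'channel_mode': 'STEREO',
        }
        self.run_a2dp_to_max_range(codec_config)
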
diff --git a/acts_tests/acts_contrib/test_utils/bt/BtInterferenceBaseTest.py b/acts_tests/acts_contrib/test_utils/bt/BtInterferenceBaseTest.py
index 3c49bee..99ca5da 100644
--- a/acts_tests/acts_contrib/test_utils/bt/BtInterferenceBaseTest.py
+++ b/acts_tests/acts_contrib/test_utils/bt/BtInterferenceBaseTest.py
@@ -18,7 +18,9 @@
 
 import json
 import math
+import random
 import time
+import logging
 import acts.controllers.iperf_client as ipc
 import acts.controllers.iperf_server as ipf
 import acts_contrib.test_utils.bt.bt_test_utils as btutils
@@ -44,7 +46,7 @@
         ap: access point object
         bandwidth: bandwidth of the WiFi network to be setup
     Returns:
-        self.brconfigs: dict for bridge interface configs
+        brconfigs: dict for bridge interface configs
     """
     wutils.wifi_toggle_state(dut, True)
     brconfigs = wputils.ap_setup(ap, network, bandwidth=bandwidth)
@@ -103,6 +105,52 @@
     return throughput
 
 
+def locate_interference_pair_by_channel(wifi_int_pairs, interference_channels):
+    """Function to find which attenautor to set based on channel info
+    Args:
+        interference_channels: list of interference channels
+    Return:
+        interference_pair_indices: list of indices for interference pair
+            in wifi_int_pairs
+    """
+    interference_pair_indices = []
+    for chan in interference_channels:
+        for i in range(len(wifi_int_pairs)):
+            if wifi_int_pairs[i].channel == chan:
+                interference_pair_indices.append(i)
+    return interference_pair_indices
+
+
+def inject_static_wifi_interference(wifi_int_pairs, interference_level,
+                                    channels):
+    """Function to inject wifi interference to bt link and read rssi.
+
+    Interference of IPERF traffic is always running, by setting attenuation,
+    the gate is opened to release the interference to the setup.
+    Args:
+        interference_level: the signal strength of wifi interference, use
+            attenuation level to represent this
+        channels: wifi channels where interference will
+            be injected, list
+    """
+    all_pair = range(len(wifi_int_pairs))
+    interference_pair_indices = locate_interference_pair_by_channel(
+        wifi_int_pairs, channels)
+    inactive_interference_pairs_indices = [
+        item for item in all_pair if item not in interference_pair_indices
+    ]
+    logging.info('Active interference pair indices: {}; inactive: {}'.format(
+        interference_pair_indices, inactive_interference_pairs_indices))
+    for i in interference_pair_indices:
+        wifi_int_pairs[i].attenuator.set_atten(interference_level)
+        logging.info('Set attenuation {} dB on attenuator {}'.format(
+            wifi_int_pairs[i].attenuator.get_atten(), i + 1))
+    for i in inactive_interference_pairs_indices:
+        wifi_int_pairs[i].attenuator.set_atten(MAX_ATTENUATION)
+        logging.info('Set attenuation {} dB on attenuator {}'.format(
+            wifi_int_pairs[i].attenuator.get_atten(), i + 1))
+
+
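
A short usage sketch of the two module-level helpers above, as they would be called from a test built on this base class (channel numbers and attenuation level are illustrative):

    # Open the attenuation "gate" for interference pairs on channels 6 and 11
    # at 20 dB, and park every other pair at MAX_ATTENUATION.
    inject_static_wifi_interference(self.wifi_int_pairs,
                                    interference_level=20,
                                    channels=[6, 11])
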
 class BtInterferenceBaseTest(A2dpBaseTest):
     def __init__(self, configs):
         super().__init__(configs)
@@ -163,7 +211,6 @@
                 obj.iperf_server.port))
             obj.iperf_server.stop()
             self.log.info('Stop IPERF process on {}'.format(obj.dut.serial))
-            obj.dut.adb.shell('pkill -9 iperf3')
             #only for glinux machine
             #            wputils.bring_down_interface(obj.ether_int.interface)
             obj.attenuator.set_atten(MAX_ATTENUATION)
@@ -172,7 +219,6 @@
     def teardown_test(self):
 
         super().teardown_test()
-
         for obj in self.wifi_int_pairs:
             obj.attenuator.set_atten(MAX_ATTENUATION)
 
diff --git a/acts_tests/acts_contrib/test_utils/bt/ble_performance_test_utils.py b/acts_tests/acts_contrib/test_utils/bt/ble_performance_test_utils.py
index 394e962..01231d3 100644
--- a/acts_tests/acts_contrib/test_utils/bt/ble_performance_test_utils.py
+++ b/acts_tests/acts_contrib/test_utils/bt/ble_performance_test_utils.py
@@ -16,19 +16,25 @@
 
 import logging
 import time
+import datetime
 import statistics
-from queue import Empty
-from concurrent.futures import ThreadPoolExecutor
-
+import os
+from acts_contrib.test_utils.bt.bt_constants import advertising_set_started
+import acts_contrib.test_utils.wifi.wifi_performance_test_utils.bokeh_figure as bokeh_figure
+from acts_contrib.test_utils.bt.bt_constants import ble_scan_settings_phys
+from acts_contrib.test_utils.bt.bt_constants import ble_scan_settings_modes
 from acts_contrib.test_utils.bt.bt_gatt_utils import close_gatt_client
 from acts_contrib.test_utils.bt.bt_coc_test_utils import do_multi_connection_throughput
 from acts_contrib.test_utils.bt.bt_gatt_utils import disconnect_gatt_connection
+from queue import Empty
 from acts_contrib.test_utils.bt.bt_constants import gatt_cb_err
 from acts_contrib.test_utils.bt.bt_constants import gatt_cb_strings
 from acts_contrib.test_utils.bt.bt_constants import l2cap_coc_header_size
 from acts_contrib.test_utils.bt.bt_gatt_utils import GattTestUtilsError
+from acts_contrib.test_utils.bt.bt_test_utils import generate_ble_scan_objects
 from acts_contrib.test_utils.bt.bt_coc_test_utils import orchestrate_coc_connection
 from acts_contrib.test_utils.bt.bt_gatt_utils import orchestrate_gatt_connection
+from concurrent.futures import ThreadPoolExecutor
 
 default_event_timeout = 10
 rssi_read_duration = 25
@@ -91,6 +97,43 @@
     return ble_rssi
 
 
+def read_ble_scan_rssi(client_ad, scan_callback, rssi_read_duration=30):
+    """Function to Read BLE RSSI of the remote BLE device.
+    Args:
+        client_ad: the Android device performing the connection.
+        scan_callback: the scan callback of the server
+    Returns:
+      ble_rssi: RSSI value of the remote BLE device
+      raw_rssi: RSSI list of remote BLE device
+    """
+    raw_rssi = []
+    timestamp = []
+    end_time = time.time() + rssi_read_duration
+    logging.info("Reading BLE Scan RSSI for {} sec".format(rssi_read_duration))
+    while time.time() < end_time:
+        expected_event = gatt_cb_strings['rd_remote_ble_rssi'].format(
+            scan_callback)
+        try:
+            event = client_ad.ed.pop_event(expected_event,
+                                           default_event_timeout)
+        except Empty:
+            logging.error(
+                gatt_cb_err['rd_remote_rssi_err'].format(expected_event))
+            return False
+        rssi_value = event['data']['Result']['rssi']
+        epoch_time = event['time']
+        d = datetime.datetime.fromtimestamp(epoch_time / 1000)
+        tstamp = d.strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]
+        timestamp.append(tstamp)
+        raw_rssi.append(rssi_value)
+    logging.debug("First & Last reading of RSSI :{:03d} & {:03d}".format(
+        raw_rssi[0], raw_rssi[-1]))
+    ble_rssi = statistics.mean(raw_rssi)
+    ble_rssi = round(ble_rssi, 2)
+
+    return ble_rssi, raw_rssi, timestamp
+
+
 def ble_coc_connection(client_ad, server_ad):
     """Sets up the CoC connection between two Android devices.
 
@@ -104,10 +147,10 @@
         client connection ID: Client connection ID
         and server connection ID : server connection ID
     """
-    #secured_conn: True if using secured connection
-    #le_connection_interval: LE Connection interval. 0 means use default.
-    #buffer_size : is the number of bytes per L2CAP data buffer
-    #le_tx_data_length: LE Data Length used by BT Controller to transmit.
+    # secured_conn: True if using secured connection
+    # le_connection_interval: LE Connection interval. 0 means use default.
+    # buffer_size : is the number of bytes per L2CAP data buffer
+    # le_tx_data_length: LE Data Length used by BT Controller to transmit.
     is_secured = False
     le_connection_interval = 30
     buffer_size = 240
@@ -134,7 +177,9 @@
     return True, gatt_callback, gatt_server, bluetooth_gatt, client_conn_id
 
 
-def run_ble_throughput(server_ad, client_conn_id, client_ad,
+def run_ble_throughput(server_ad,
+                       client_conn_id,
+                       client_ad,
                        num_iterations=30):
     """Function to measure Throughput from one client to one-or-many servers
 
@@ -208,3 +253,76 @@
         logging.error(err)
         return False
     return True
+
+
+def plot_graph(df, plot_data, bokeh_data, secondary_y_label=None):
+    """ Plotting for generating bokeh figure
+
+    Args:
+        df: Summary of results contains attenuation, DUT RSSI, remote RSSI and Tx Power
+        plot_data: plot_data for adding line to existing BokehFigure
+        bokeh_data: bokeh data for generating BokehFigure
+        secondary_y_label : label for secondary y axis , None if not available
+    """
+    plot = bokeh_figure.BokehFigure(
+        title='{}'.format(bokeh_data['current_test_name']),
+        x_label=bokeh_data['x_label'],
+        primary_y_label=bokeh_data['primary_y_label'],
+        secondary_y_label=secondary_y_label,
+        axis_label_size='16pt',
+        legend_label_size='16pt',
+        axis_tick_label_size='16pt',
+        sizing_mode='stretch_both')
+
+    for data in plot_data:
+        plot.add_line(df[plot_data[data].get('x_column')],
+                      df[plot_data[data].get('y_column')],
+                      legend=plot_data[data].get('legend'),
+                      marker=plot_data[data].get('marker'),
+                      y_axis=plot_data[data].get('y_axis'))
+
+    results_file_path = os.path.join(
+        bokeh_data['log_path'],
+        '{}.html'.format(bokeh_data['current_test_name']))
+    plot.generate_figure()
+    bokeh_figure.BokehFigure.save_figures([plot], results_file_path)
+
+
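
The expected shapes of plot_data and bokeh_data mirror the dicts built in A2dpBaseTest.run_a2dp_to_max_range earlier in this change; a trimmed, self-contained example (values are illustrative):

    import pandas as pd

    df = pd.DataFrame([{'pathloss': 58, 'rssi_primary': -65},
                       {'pathloss': 68, 'rssi_primary': -75}])
    bokeh_data = {
        'x_label': 'Pathloss (dBm)',
        'primary_y_label': 'RSSI (dBm)',
        'log_path': '/tmp/logs',
        'current_test_name': 'test_a2dp_range_sbc',
    }
    plot_data = {
        'line_one': {
            'x_column': 'pathloss',
            'y_column': 'rssi_primary',
            'legend': 'DUT RSSI (dBm)',
            'marker': 'circle_x',
            'y_axis': 'default',
        },
    }
    plot_graph(df, plot_data, bokeh_data)
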
+def start_advertising_and_scanning(client_ad, server_ad, Legacymode=True):
+    """Function to start bt5 advertisement.
+
+        Args:
+            client_ad: the Android device performing the scanning.
+            server_ad: the Android device performing the bt advertising
+            Legacymode: True for Legacy advertising mode, false for bt5 advertising mode
+        Returns:
+          adv_callback: the advertising callback
+          scan_callback: the scan_callback
+        """
+    adv_callback = server_ad.droid.bleAdvSetGenCallback()
+    adv_data = {
+        "includeDeviceName": True,
+    }
+    server_ad.droid.bleAdvSetStartAdvertisingSet(
+        {
+            "connectable": False,
+            "legacyMode": Legacymode,
+            "primaryPhy": "PHY_LE_1M",
+            "secondaryPhy": "PHY_LE_1M",
+            "interval": 320
+        }, adv_data, None, None, None, 0, 0, adv_callback)
+    server_ad.ed.pop_event(advertising_set_started.format(adv_callback),
+                           default_event_timeout)
+    logging.info("Bt5 Advertiser Started Successfully")
+    client_ad.droid.bleSetScanSettingsLegacy(False)
+    client_ad.droid.bleSetScanSettingsScanMode(
+        ble_scan_settings_modes['low_latency'])
+    client_ad.droid.bleSetScanSettingsPhy(ble_scan_settings_phys['1m'])
+
+    filter_list, scan_settings, scan_callback = generate_ble_scan_objects(
+        client_ad.droid)
+    adv_device_name = server_ad.droid.bluetoothGetLocalName()
+    client_ad.droid.bleSetScanFilterDeviceName(adv_device_name)
+    client_ad.droid.bleBuildScanFilter(filter_list)
+    client_ad.droid.bleStartBleScan(filter_list, scan_settings, scan_callback)
+    return adv_callback, scan_callback
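
Taken together with read_ble_scan_rssi above, the intended flow is roughly the following (sketch only; client_ad/server_ad are AndroidDevice objects and teardown of the scan/advertisement is omitted):

    adv_callback, scan_callback = start_advertising_and_scanning(
        client_ad, server_ad, Legacymode=False)
    result = read_ble_scan_rssi(client_ad, scan_callback, rssi_read_duration=30)
    if result:
        ble_rssi, raw_rssi, timestamps = result
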
diff --git a/acts_tests/acts_contrib/test_utils/bt/bt_constants.py b/acts_tests/acts_contrib/test_utils/bt/bt_constants.py
index 71cebd7..ffb56b4 100644
--- a/acts_tests/acts_contrib/test_utils/bt/bt_constants.py
+++ b/acts_tests/acts_contrib/test_utils/bt/bt_constants.py
@@ -348,6 +348,7 @@
     "desc_read": "GattConnect{}onDescriptorRead",
     "desc_read_req": "GattServer{}onDescriptorReadRequest",
     "rd_remote_rssi": "GattConnect{}onReadRemoteRssi",
+    "rd_remote_ble_rssi": "BleScan{}onScanResults",
     "gatt_serv_disc": "GattConnect{}onServicesDiscovered",
     "serv_added": "GattServer{}onServiceAdded",
     "mtu_changed": "GattConnect{}onMtuChanged",
diff --git a/acts_tests/acts_contrib/test_utils/bt/bt_test_utils.py b/acts_tests/acts_contrib/test_utils/bt/bt_test_utils.py
index eeb72d3..ba66535 100644
--- a/acts_tests/acts_contrib/test_utils/bt/bt_test_utils.py
+++ b/acts_tests/acts_contrib/test_utils/bt/bt_test_utils.py
@@ -21,9 +21,12 @@
 import string
 import threading
 import time
+try:
+    import pandas as pd
+except ModuleNotFoundError:
+    pass
 from queue import Empty
 from subprocess import call
-
 from acts import asserts
 from acts_contrib.test_utils.bt.bt_constants import adv_fail
 from acts_contrib.test_utils.bt.bt_constants import adv_succ
@@ -105,8 +108,7 @@
             ad.ed.pop_event(expected_bluetooth_on_event_name,
                             bt_default_timeout)
         except Empty:
-            ad.log.info(
-                "Failed to toggle Bluetooth on(no broadcast received).")
+            ad.log.info("Failed to toggle Bluetooth on(no broadcast received).")
             # Try one more time to poke at the actual state.
             if ad.droid.bluetoothCheckState():
                 ad.log.info(".. actual state is ON")
@@ -230,9 +232,7 @@
     while not connected and (time.time() - start_time < timeout):
         bonded_info = android.droid.bluetoothGetBondedDevices()
         connected_info = android.droid.bluetoothGetConnectedDevices()
-        if headset.mac_address not in [
-                info["address"] for info in bonded_info
-        ]:
+        if headset.mac_address not in [info["address"] for info in bonded_info]:
             # Use SL4A to pair and connect with headset.
             headset.enter_pairing_mode()
             android.droid.bluetoothDiscoverAndBond(headset_mac_address)
@@ -251,6 +251,7 @@
     log.info('Devices connected after pair attempt: %s' % connected)
     return connected
 
+
 def connect_pri_to_sec(pri_ad, sec_ad, profiles_set, attempts=2):
     """Connects pri droid to secondary droid.
 
@@ -312,34 +313,33 @@
 
     # Now try to connect them, the following call will try to initiate all
     # connections.
-    pri_ad.droid.bluetoothConnectBonded(
-        sec_ad.droid.bluetoothGetLocalAddress())
+    pri_ad.droid.bluetoothConnectBonded(sec_ad.droid.bluetoothGetLocalAddress())
 
     end_time = time.time() + 10
     profile_connected = set()
     sec_addr = sec_ad.droid.bluetoothGetLocalAddress()
     pri_ad.log.info("Profiles to be connected {}".format(profiles_set))
     # First use APIs to check profile connection state
-    while (time.time() < end_time
-           and not profile_connected.issuperset(profiles_set)):
-        if (bt_profile_constants['headset_client'] not in profile_connected
-                and bt_profile_constants['headset_client'] in profiles_set):
+    while (time.time() < end_time and
+           not profile_connected.issuperset(profiles_set)):
+        if (bt_profile_constants['headset_client'] not in profile_connected and
+                bt_profile_constants['headset_client'] in profiles_set):
             if is_hfp_client_device_connected(pri_ad, sec_addr):
                 profile_connected.add(bt_profile_constants['headset_client'])
-        if (bt_profile_constants['a2dp'] not in profile_connected
-                and bt_profile_constants['a2dp'] in profiles_set):
+        if (bt_profile_constants['a2dp'] not in profile_connected and
+                bt_profile_constants['a2dp'] in profiles_set):
             if is_a2dp_src_device_connected(pri_ad, sec_addr):
                 profile_connected.add(bt_profile_constants['a2dp'])
-        if (bt_profile_constants['a2dp_sink'] not in profile_connected
-                and bt_profile_constants['a2dp_sink'] in profiles_set):
+        if (bt_profile_constants['a2dp_sink'] not in profile_connected and
+                bt_profile_constants['a2dp_sink'] in profiles_set):
             if is_a2dp_snk_device_connected(pri_ad, sec_addr):
                 profile_connected.add(bt_profile_constants['a2dp_sink'])
-        if (bt_profile_constants['map_mce'] not in profile_connected
-                and bt_profile_constants['map_mce'] in profiles_set):
+        if (bt_profile_constants['map_mce'] not in profile_connected and
+                bt_profile_constants['map_mce'] in profiles_set):
             if is_map_mce_device_connected(pri_ad, sec_addr):
                 profile_connected.add(bt_profile_constants['map_mce'])
-        if (bt_profile_constants['map'] not in profile_connected
-                and bt_profile_constants['map'] in profiles_set):
+        if (bt_profile_constants['map'] not in profile_connected and
+                bt_profile_constants['map'] in profiles_set):
             if is_map_mse_device_connected(pri_ad, sec_addr):
                 profile_connected.add(bt_profile_constants['map'])
         time.sleep(0.1)
@@ -600,8 +600,8 @@
 
 
 def generate_id_by_size(size,
-                        chars=(string.ascii_lowercase +
-                               string.ascii_uppercase + string.digits)):
+                        chars=(string.ascii_lowercase + string.ascii_uppercase +
+                               string.digits)):
     """Generate random ascii characters of input size and input char types
 
     Args:
@@ -710,7 +710,11 @@
     return otp_dict
 
 
-def get_bt_metric(ad_list, duration=1, tag="bt_metric", processed=True):
+def get_bt_metric(ad_list,
+                  duration=1,
+                  bqr_tag='Monitoring , Handle:',
+                  tag='',
+                  log_path=False):
     """ Function to get the bt metric from logcat.
 
     Captures logcat for the specified duration and returns the bqr results.
@@ -719,21 +723,36 @@
 
     Args:
         ad_list: list of android_device objects
-        duration: time duration (seconds) for which the logcat is parsed.
-        tag: tag to be appended to the logcat dump.
-        processed: flag to process bqr output.
+        duration: time duration (seconds) for which the logcat is parsed
+        bqr_tag: tag of bt metrics
+        tag: tag to be appended to the metrics raw data
+        log_path: path of metrics raw data
 
     Returns:
-        metrics_dict: dict of metrics for each android device.
+        process_data: dict of processed metric data for each android device
     """
 
-    # Defining bqr quantitites and their regex to extract
+    # Defining bqr quantities and their regex to extract
     regex_dict = {
-        "vsp_txpl": "VSP_TxPL:\s(\S+)",
         "pwlv": "PwLv:\s(\S+)",
-        "rssi": "RSSI:\s[-](\d+)"
+        "rssi": "RSSI:\s[-](\d+)",
+        "rssi_c0": "RSSI_C0:\s[-](\d+)",
+        "rssi_c1": "RSSI_C1:\s[-](\d+)",
+        "txpw_c0": "\sTxPw_C0:\s(-?\d+)",
+        "txpw_c1": "\sTxPw_C1:\s(-?\d+)",
+        "bftx": "BFTx:\s(\w+)",
+        "divtx": "DivTx:\s(\w+)"
     }
-    metrics_dict = {"rssi": {}, "pwlv": {}, "vsp_txpl": {}}
+    metrics_dict = {
+        "rssi": {},
+        "pwlv": {},
+        "rssi_c0": {},
+        "rssi_c1": {},
+        "txpw_c0": {},
+        "txpw_c1": {},
+        "bftx": {},
+        "divtx": {}
+    }
 
     # Converting a single android device object to list
     if not isinstance(ad_list, list):
@@ -749,8 +768,7 @@
     end_time = utils.get_current_epoch_time()
 
     for ad in ad_list:
-        bt_rssi_log = ad.cat_adb_log(tag, begin_time, end_time)
-        bqr_tag = "Handle:"
+        bt_rssi_log = ad.cat_adb_log(tag + "_bt_metric", begin_time, end_time)
 
         # Extracting supporting bqr quantities
         for metric, regex in regex_dict.items():
@@ -762,33 +780,97 @@
                         m = re.findall(regex, line)[0].strip(",")
                         bqr_metric.append(m)
             metrics_dict[metric][ad.serial] = bqr_metric
+            file_bt_log.close()
 
-        # Formatting the raw data
-        metrics_dict["rssi"][ad.serial] = [
-            (-1) * int(x) for x in metrics_dict["rssi"][ad.serial]
-        ]
-        metrics_dict["pwlv"][ad.serial] = [
-            int(x, 16) if '0x' in x else int(x, 10) for x in metrics_dict["pwlv"][ad.serial]
-        ]
+        # Formatting and saving the raw data
+        metrics_to_be_formatted = [{
+            "name": "rssi",
+            "averageable": "y"
+        }, {
+            "name": "rssi_c0",
+            "averageable": "y"
+        }, {
+            "name": "rssi_c1",
+            "averageable": "y"
+        }, {
+            "name": "pwlv",
+            "averageable": "n"
+        }, {
+            "name": "txpw_c0",
+            "averageable": "n"
+        }, {
+            "name": "txpw_c1",
+            "averageable": "n"
+        }, {
+            "name": "bftx",
+            "averageable": "n"
+        }, {
+            "name": "divtx",
+            "averageable": "n"
+        }]
+        for metric in metrics_to_be_formatted:
+            if metric["averagble"] == "y":
+                metrics_dict[metric["name"]][ad.serial] = [
+                    (-1) * int(x)
+                    for x in metrics_dict[metric["name"]][ad.serial]
+                ]
+            else:
+                metrics_dict[metric["name"]][ad.serial] = [
+                    int(x, 16) if '0x' in x else int(x, 10)
+                    for x in metrics_dict[metric["name"]][ad.serial]
+                ]
+        # Saving metrics raw data for each attenuation
+        if log_path:
+            output_file_name = ad.serial + "_metrics_raw_data_" + tag + ".csv"
+            output_file = os.path.join(log_path, output_file_name)
+            os.makedirs(log_path, exist_ok=True)
+            df_save_metrics = {}
+            for item in metrics_dict.items():
+                df_save_metrics[item[0]] = next(iter(item[1].items()))[1]
+            MetricsDict_df = pd.DataFrame({
+                key: pd.Series(value)
+                for key, value in df_save_metrics.items()
+            })
+            MetricsDict_df.to_csv(output_file)
+        # Defining the process_data_dict
+        process_data = {
+            "rssi": {},
+            "pwlv": {},
+            "rssi_c0": {},
+            "rssi_c1": {},
+            "txpw_c0": {},
+            "txpw_c1": {},
+            "bftx": {},
+            "divtx": {}
+        }
 
-        # Processing formatted data if processing is required
-        if processed:
-            metrics_dict["rssi"][ad.serial] = [
-            x for x in metrics_dict["rssi"][ad.serial] if x !=0 and x != -127
-            ]
-            # Computes the average RSSI
-            metrics_dict["rssi"][ad.serial] = round(
-                sum(metrics_dict["rssi"][ad.serial]) /
-                len(metrics_dict["rssi"][ad.serial]), 2)
-            # Returns last noted value for power level
-            metrics_dict["pwlv"][ad.serial] = float(
-                sum(metrics_dict["pwlv"][ad.serial]) /
-                len(metrics_dict["pwlv"][ad.serial]))
+        # Computing and returning the raw data
+        for metric in metrics_to_be_formatted:
+            if metric["averagble"] == "y":
+                process_data[metric["name"]][ad.serial] = [
+                    x for x in metrics_dict[metric["name"]][ad.serial]
+                    if x != 0 and x != -127
+                ]
 
-    return metrics_dict
+                try:
+                    # Average the filtered readings
+                    process_data[metric["name"]][ad.serial] = round(
+                        sum(process_data[metric["name"]][ad.serial]) /
+                        len(process_data[metric["name"]][ad.serial]), 2)
+                except ZeroDivisionError:
+                    # No valid readings collected, set value to 'n/a'
+                    process_data[metric["name"]][ad.serial] = "n/a"
+            else:
+                try:
+                    # Take the most common (mode) value
+                    process_data[metric["name"]][ad.serial] = max(
+                        metrics_dict[metric["name"]][ad.serial],
+                        key=metrics_dict[metric["name"]][ad.serial].count)
+                except ValueError:
+                    # No readings collected, set value to 'n/a'
+                    process_data[metric["name"]][ad.serial] = "n/a"
+
+    return process_data
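
For reference, each regex in regex_dict is applied with re.findall() to logcat lines containing the bqr_tag; a self-contained sketch against an invented sample line (the real BQR line format may differ):

    import re

    sample = 'Monitoring , Handle: 0x0003, PwLv: 0x19, RSSI: -63, RSSI_C0: -62, RSSI_C1: -66'
    print(re.findall(r'RSSI:\s[-](\d+)', sample))   # ['63']
    print(re.findall(r'PwLv:\s(\S+)', sample))      # ['0x19,'] -> stripped of ',' above
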
 
 
-def get_bt_rssi(ad, duration=1, processed=True):
+def get_bt_rssi(ad, duration=1, processed=True, tag='', log_path=False):
     """Function to get average bt rssi from logcat.
 
     This function returns the average RSSI for the given duration. RSSI values are
@@ -801,11 +883,7 @@
     Returns:
         avg_rssi: average RSSI on each android device for the given duration.
     """
-    function_tag = "get_bt_rssi"
-    bqr_results = get_bt_metric(ad,
-                                duration,
-                                tag=function_tag,
-                                processed=processed)
+    bqr_results = get_bt_metric(ad, duration, tag=tag, log_path=log_path)
     return bqr_results["rssi"]
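
A short usage sketch of the updated helper (device list, tag and path are placeholders):

    rssi_by_serial = get_bt_rssi(android_devices, duration=5,
                                 tag='attenuation_30dB',
                                 log_path='/tmp/bt_metrics')
    dut_rssi = rssi_by_serial.get(android_devices[0].serial, -127)
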
 
 
@@ -1201,8 +1279,7 @@
             test_result = False
         time.sleep(1)
     if not test_result:
-        client_ad.log.error(
-            "Failed to establish a Bluetooth socket connection")
+        client_ad.log.error("Failed to establish a Bluetooth socket connection")
         return False
     return True
 
@@ -1253,9 +1330,8 @@
                     str(curr_attempts)))
             return False
         if not clear_bonded_devices(sec_ad):
-            log.error(
-                "Failed to clear bond for secondary device at attempt {}".
-                format(str(curr_attempts)))
+            log.error("Failed to clear bond for secondary device at attempt {}".
+                      format(str(curr_attempts)))
             return False
         # Wait 2 seconds after unbound
         time.sleep(2)
@@ -1407,11 +1483,10 @@
     droid, ed = android_device.droid, android_device.ed
     if not droid.bluetoothA2dpSetCodecConfigPreference(
             codec_types[codec_type], sample_rates[str(sample_rate)],
-            bits_per_samples[str(bits_per_sample)],
-            channel_modes[channel_mode], codec_specific_1):
-        android_device.log.warning(
-            "SL4A command returned False. Codec was not "
-            "changed.")
+            bits_per_samples[str(bits_per_sample)], channel_modes[channel_mode],
+            codec_specific_1):
+        android_device.log.warning("SL4A command returned False. Codec was not "
+                                   "changed.")
     else:
         try:
             ed.pop_event(bluetooth_a2dp_codec_config_changed,
@@ -1623,8 +1698,8 @@
     out_name = ','.join((testname, device_model, serial))
     snoop_path = os.path.join(ad.device_log_path, 'BluetoothSnoopLogs')
     os.makedirs(snoop_path, exist_ok=True)
-    cmd = ''.join(("adb -s ", serial, " pull ", btsnoop_log_path_on_device,
-                   " ", snoop_path + '/' + out_name, ".btsnoop_hci.log"))
+    cmd = ''.join(("adb -s ", serial, " pull ", btsnoop_log_path_on_device, " ",
+                   snoop_path + '/' + out_name, ".btsnoop_hci.log"))
     exe_cmd(cmd)
     try:
         cmd = ''.join(
@@ -1745,9 +1820,8 @@
             timeout=bt_default_timeout)
         sec_variant = sec_pairing_req["data"]["PairingVariant"]
         sec_pin = sec_pairing_req["data"]["Pin"]
-        sec_ad.log.info(
-            "Secondary device received Pin: {}, Variant: {}".format(
-                sec_pin, sec_variant))
+        sec_ad.log.info("Secondary device received Pin: {}, Variant: {}".format(
+            sec_pin, sec_variant))
     except Empty as err:
         log.error("Wait for pin error: {}".format(err))
         log.error("Pairing request state, Primary: {}, Secondary: {}".format(
@@ -1813,6 +1887,7 @@
     """Media control using sl4a facade for general purpose.
 
     """
+
     def __init__(self, android_device, music_file):
         """Initialize the media_control class.
 
diff --git a/acts_tests/acts_contrib/test_utils/gnss/GnssBlankingBase.py b/acts_tests/acts_contrib/test_utils/gnss/GnssBlankingBase.py
new file mode 100644
index 0000000..9fe9fa4
--- /dev/null
+++ b/acts_tests/acts_contrib/test_utils/gnss/GnssBlankingBase.py
@@ -0,0 +1,505 @@
+#!/usr/bin/env python3
+#
+#   Copyright 2021 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the 'License');
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an 'AS IS' BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+import os
+from glob import glob
+from time import sleep
+from collections import namedtuple
+from numpy import arange
+from pandas import DataFrame
+from acts.signals import TestError
+from acts.signals import TestFailure
+from acts.logger import epoch_to_log_line_timestamp
+from acts.context import get_current_context
+from acts_contrib.test_utils.gnss import LabTtffTestBase as lttb
+from acts_contrib.test_utils.gnss.gnss_test_utils import launch_eecoexer
+from acts_contrib.test_utils.gnss.gnss_test_utils import excute_eecoexer_function
+from acts_contrib.test_utils.gnss.gnss_test_utils import start_gnss_by_gtw_gpstool
+from acts_contrib.test_utils.gnss.gnss_test_utils import get_current_epoch_time
+from acts_contrib.test_utils.gnss.gnss_test_utils import check_current_focus_app
+from acts_contrib.test_utils.gnss.gnss_test_utils import process_ttff_by_gtw_gpstool
+from acts_contrib.test_utils.gnss.gnss_test_utils import check_ttff_data
+from acts_contrib.test_utils.gnss.gnss_test_utils import process_gnss_by_gtw_gpstool
+from acts_contrib.test_utils.gnss.gnss_test_utils import start_pixel_logger
+from acts_contrib.test_utils.gnss.gnss_test_utils import stop_pixel_logger
+from acts_contrib.test_utils.gnss.dut_log_test_utils import start_diagmdlog_background
+from acts_contrib.test_utils.gnss.dut_log_test_utils import get_gpstool_logs
+from acts_contrib.test_utils.gnss.dut_log_test_utils import stop_background_diagmdlog
+from acts_contrib.test_utils.gnss.dut_log_test_utils import get_pixellogger_bcm_log
+from acts_contrib.test_utils.gnss.gnss_testlog_utils import parse_gpstool_ttfflog_to_df
+
+
+def range_wi_end(ad, start, stop, step):
+    """
+    Generate a list of data from start to stop with the step. The list includes start and stop value
+    and also supports floating point.
+    Args:
+        start: start value.
+            Type, int or float.
+        stop: stop value.
+            Type, int or float.
+        step: step value.
+            Type, int or float.
+    Returns:
+        range_ls: the list of data.
+    """
+    if step == 0:
+        ad.log.warn('Step is 0. Return empty list')
+        range_ls = []
+    else:
+        if start == stop:
+            range_ls = [stop]
+        else:
+            range_ls = list(arange(start, stop, step))
+            if len(range_ls) > 0:
+                if (step < 0 and range_ls[-1] > stop) or (step > 0 and
+                                                          range_ls[-1] < stop):
+                    range_ls.append(stop)
+    return range_ls
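
Illustrative outputs of range_wi_end (ad is only used for logging; values produced via numpy.arange are shown as plain numbers):

    range_wi_end(ad, 10, 24, 5)        # -> [10, 15, 20, 24]
    range_wi_end(ad, -130, -150, -5)   # -> [-130, -135, -140, -145, -150]
    range_wi_end(ad, 10, 10, 3)        # -> [10]
    range_wi_end(ad, 0, 5, 0)          # -> [] (and logs a warning)
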
+
+
+def check_ttff_pe(ad, ttff_data, ttff_mode, pe_criteria):
+    """Verify all TTFF results from ttff_data.
+
+    Args:
+        ad: An AndroidDevice object.
+        ttff_data: TTFF data of secs, position error and signal strength.
+        ttff_mode: TTFF Test mode for current test item.
+        pe_criteria: Criteria for current test item.
+
+    """
+    ret = True
+    ad.log.info("%d iterations of TTFF %s tests finished." %
+                (len(ttff_data.keys()), ttff_mode))
+    ad.log.info("%s PASS criteria is %f meters" % (ttff_mode, pe_criteria))
+    ad.log.debug("%s TTFF data: %s" % (ttff_mode, ttff_data))
+
+    if len(ttff_data.keys()) == 0:
+        ad.log.error("GTW_GPSTool didn't process TTFF properly.")
+        raise TestFailure("GTW_GPSTool didn't process TTFF properly.")
+
+    if any(
+            float(ttff_data[key].ttff_pe) >= pe_criteria
+            for key in ttff_data.keys()):
+        ad.log.error("One or more TTFF %s are over test criteria %f meters" %
+                     (ttff_mode, pe_criteria))
+        ret = False
+    else:
+        ad.log.info("All TTFF %s are within test criteria %f meters." %
+                    (ttff_mode, pe_criteria))
+        ret = True
+    return ret
+
+
+class GnssBlankingBase(lttb.LabTtffTestBase):
+    """ LAB GNSS Cellular Coex Tx Power Sweep TTFF/FFPE Tests"""
+
+    def __init__(self, controllers):
+        """ Initializes class attributes. """
+        super().__init__(controllers)
+        self.eecoex_func = ''
+        self.start_pwr = 10
+        self.stop_pwr = 24
+        self.offset = 1
+        self.result_cell_pwr = 10
+        self.gsm_sweep_params = None
+        self.lte_tdd_pc3_sweep_params = None
+        self.lte_tdd_pc2_sweep_params = None
+        self.sa_sensitivity = -150
+        self.gnss_pwr_lvl_offset = -5
+        self.maskfile = None
+
+    def setup_class(self):
+        super().setup_class()
+        req_params = ['sa_sensitivity', 'gnss_pwr_lvl_offset']
+        self.unpack_userparams(req_param_names=req_params)
+        cell_sweep_params = self.user_params.get('cell_pwr_sweep', [])
+        self.gsm_sweep_params = cell_sweep_params.get("GSM", [10, 33, 1])
+        self.lte_tdd_pc3_sweep_params = cell_sweep_params.get(
+            "LTE_TDD_PC3", [10, 24, 1])
+        self.lte_tdd_pc2_sweep_params = cell_sweep_params.get(
+            "LTE_TDD_PC2", [10, 26, 1])
+        self.sa_sensitivity = self.user_params.get('sa_sensitivity', -150)
+        self.gnss_pwr_lvl_offset = self.user_params.get('gnss_pwr_lvl_offset', -5)
+
+    def setup_test(self):
+        super().setup_test()
+        launch_eecoexer(self.dut)
+
+        # Set DUT temperature the limit to 60 degree
+        self.dut.adb.shell(
+            'setprop persist.com.google.eecoexer.cellular.temperature_limit 60')
+
+        # Get current context full path to create the log folder.
+        cur_test_item_dir = get_current_context().get_full_output_path()
+        self.gnss_log_path = os.path.join(self.log_path, cur_test_item_dir)
+        os.makedirs(self.gnss_log_path, exist_ok=True)
+
+        # Start GNSS chip log
+        if self.diag_option == "QCOM":
+            start_diagmdlog_background(self.dut, maskfile=self.maskfile)
+        else:
+            start_pixel_logger(self.dut)
+
+    def teardown_test(self):
+        super().teardown_test()
+        # Set gnss_vendor_log_path based on GNSS solution vendor.
+        gnss_vendor_log_path = os.path.join(self.gnss_log_path,
+                                            self.diag_option)
+        os.makedirs(gnss_vendor_log_path, exist_ok=True)
+
+        # Stop GNSS chip log and pull the logs to local file system.
+        if self.diag_option == "QCOM":
+            stop_background_diagmdlog(self.dut,
+                                      gnss_vendor_log_path,
+                                      keep_logs=False)
+        else:
+            stop_pixel_logger(self.dut)
+            self.log.info('Getting Pixel BCM Log!')
+            get_pixellogger_bcm_log(self.dut,
+                                    gnss_vendor_log_path,
+                                    keep_logs=False)
+
+        # Stop cellular Tx and close GPStool and EEcoexer APPs.
+        self.stop_cell_tx()
+        self.log.debug('Close GPStool APP')
+        self.dut.force_stop_apk("com.android.gpstool")
+        self.log.debug('Close EEcoexer APP')
+        self.dut.force_stop_apk("com.google.eecoexer")
+
+    def stop_cell_tx(self):
+        """
+        Stop EEcoexer Tx power.
+        """
+        # EEcoexer cellular stop Tx command.
+        stop_cell_tx_cmd = 'CELLR,19'
+
+        # Stop cellular Tx by EEcoexer.
+        self.log.info('Stop EEcoexer Test Command: {}'.format(stop_cell_tx_cmd))
+        excute_eecoexer_function(self.dut, stop_cell_tx_cmd)
+
+    def analysis_ttff_ffpe(self, ttff_data, json_tag=''):
+        """
+        Pull logs and parsing logs into json file.
+        Args:
+            ttff_data: ttff_data from test results.
+                Type, list.
+            json_tag: tag for parsed json file name.
+                Type, str.
+        """
+        # Create log directory.
+        gps_log_path = os.path.join(self.gnss_log_path,
+                                    'Cell_Pwr_Sweep_Results')
+
+        # Pull logs of GTW GPStool.
+        get_gpstool_logs(self.dut, gps_log_path, False)
+
+        # Parsing the log of GTW GPStool into pandas dataframe.
+        target_log_name_regx = os.path.join(gps_log_path, 'GPSLogs', 'files',
+                                            'GNSS_*')
+        self.log.info('Get GPStool logs from: {}'.format(target_log_name_regx))
+        gps_api_log_ls = glob(target_log_name_regx)
+        latest_gps_api_log = max(gps_api_log_ls, key=os.path.getctime)
+        self.log.info(
+            'Get latest GPStool log is: {}'.format(latest_gps_api_log))
+        try:
+            df_ttff_ffpe = DataFrame(
+                parse_gpstool_ttfflog_to_df(latest_gps_api_log))
+
+            # Add test case, TTFF and FFPE data into the dataframe.
+            ttff_dict = {}
+            for i in ttff_data:
+                data = ttff_data[i]._asdict()
+                ttff_dict[i] = dict(data)
+            ttff_time = []
+            ttff_pe = []
+            test_case = []
+            for value in ttff_dict.values():
+                ttff_time.append(value['ttff_sec'])
+                ttff_pe.append(value['ttff_pe'])
+                test_case.append(json_tag)
+            self.log.info('test_case length {}'.format(str(len(test_case))))
+
+            df_ttff_ffpe['test_case'] = test_case
+            df_ttff_ffpe['ttff_sec'] = ttff_time
+            df_ttff_ffpe['ttff_pe'] = ttff_pe
+            json_file = 'gps_log_{}.json'.format(json_tag)
+            json_path = os.path.join(gps_log_path, json_file)
+            # Save dataframe into json file.
+            df_ttff_ffpe.to_json(json_path, orient='table', index=False)
+        except ValueError:
+            self.log.warning('Can\'t parse the log data into a json file.')
+
+    def gnss_hot_start_ttff_ffpe_test(self,
+                                      iteration,
+                                      sweep_enable=False,
+                                      json_tag=''):
+        """
+        GNSS hot start TTFF FFPE test.
+
+        Args:
+            iteration: hot start TTFF test iteration.
+                    Type, int.
+                    Default, 1.
+            sweep_enable: Indicator for the function to check if it is run by cell_power_sweep()
+                    Type, bool.
+                    Default, False.
+            json_tag: if the function is run by cell_power_sweep(), the function would use
+                    this as a part of file name to save TTFF and FFPE results into json file.
+                    Type, str.
+                    Default, ''.
+        Raise:
+            TestError: fail to send TTFF start_test_action.
+        """
+        # Start GTW GPStool.
+        test_type = namedtuple('Type', ['command', 'criteria'])
+        test_type_ttff = test_type('Hot Start', self.hs_ttff_criteria)
+        test_type_pe = test_type('Hot Start', self.hs_ttff_pecriteria)
+        self.dut.log.info("Restart GTW GPSTool")
+        start_gnss_by_gtw_gpstool(self.dut, state=True)
+
+        # Get current time and convert to human readable format
+        begin_time = get_current_epoch_time()
+        log_begin_time = epoch_to_log_line_timestamp(begin_time)
+        self.dut.log.debug('Start time is {}'.format(log_begin_time))
+
+        # Run hot start TTFF
+        for i in range(3):
+            self.log.info('Start hot start attempt %d' % (i + 1))
+            self.dut.adb.shell(
+                "am broadcast -a com.android.gpstool.ttff_action "
+                "--es ttff hs --es cycle {} --ez raninterval False".format(
+                    iteration))
+            sleep(1)
+            if self.dut.search_logcat(
+                    "act=com.android.gpstool.start_test_action", begin_time):
+                self.dut.log.info("Send TTFF start_test_action successfully.")
+                break
+        else:
+            check_current_focus_app(self.dut)
+            raise TestError("Fail to send TTFF start_test_action.")
+
+        # Verify hot start TTFF results
+        ttff_data = process_ttff_by_gtw_gpstool(self.dut, begin_time,
+                                                self.simulator_location)
+
+        # Stop GTW GPSTool
+        self.dut.log.info("Stop GTW GPSTool")
+        start_gnss_by_gtw_gpstool(self.dut, state=False)
+
+        if sweep_enable:
+            self.analysis_ttff_ffpe(ttff_data, json_tag)
+
+        result_ttff = check_ttff_data(self.dut,
+                                      ttff_data,
+                                      ttff_mode=test_type_ttff.command,
+                                      criteria=test_type_ttff.criteria)
+        result_pe = check_ttff_pe(self.dut,
+                                  ttff_data,
+                                  ttff_mode=test_type_pe.command,
+                                  pe_criteria=test_type_pe.criteria)
+        if not result_ttff or not result_pe:
+            self.dut.log.warning('%s TTFF fails to reach '
+                                 'designated criteria' % test_type_ttff.command)
+            self.dut.log.info("Stop GTW GPSTool")
+            return False
+
+        return True
+
+    def hot_start_gnss_power_sweep(self,
+                                   start_pwr,
+                                   stop_pwr,
+                                   offset,
+                                   wait,
+                                   iteration=1,
+                                   sweep_enable=False,
+                                   title=''):
+        """
+        GNSS simulator power sweep of hot start test.
+
+        Args:
+            start_pwr: GNSS simulator power sweep start power level.
+                    Type, int.
+            stop_pwr: GNSS simulator power sweep stop power level.
+                    Type, int.
+            offset: GNSS simulator power sweep offset
+                    Type, int.
+            wait: Wait time before the power sweep.
+                    Type, int.
+            iteration: The iteration times of hot start test.
+                    Type, int.
+                    Default, 1.
+            sweep_enable: Indicator for power sweep.
+                          It will be True only in GNSS sensitivity search case.
+                    Type, bool.
+                    Default, False.
+            title: the target log folder title for GNSS sensitivity search test items.
+                    Type, str.
+                    Default, ''.
+        """
+
+        # Calculate loop range list from gnss_simulator_power_level and sa_sensitivity
+        range_ls = range_wi_end(self.dut, start_pwr, stop_pwr, offset)
+        sweep_range = ','.join([str(x) for x in range_ls])
+
+        self.log.debug(
+            'Start the GNSS simulator power sweep. The sweep range is [{}]'.
+            format(sweep_range))
+
+        if sweep_enable:
+            self.start_gnss_and_wait(wait)
+        else:
+            self.dut.log.info('Wait %d seconds to start TTFF HS' % wait)
+            sleep(wait)
+
+        # Sweep GNSS simulator power level in range_ls.
+        # Do hot start for every power level.
+        # Check the TTFF result if it can pass the criteria.
+        gnss_pwr_lvl = -130
+        for gnss_pwr_lvl in range_ls:
+
+            # Set GNSS Simulator power level
+            self.log.info('Set GNSS simulator power level to %.1f' %
+                          gnss_pwr_lvl)
+            self.gnss_simulator.set_power(gnss_pwr_lvl)
+            json_tag = title + '_gnss_pwr_' + str(gnss_pwr_lvl)
+
+            # GNSS hot start test
+            if not self.gnss_hot_start_ttff_ffpe_test(iteration, sweep_enable,
+                                                      json_tag):
+                sensitivity = gnss_pwr_lvl - offset
+                return False, sensitivity
+        return True, gnss_pwr_lvl
+
+    def gnss_init_power_setting(self, first_wait=180):
+        """
+        GNSS initial power level setting.
+        Args:
+            first_wait: wait time after the cold start.
+                        Type, int.
+                        Default, 180.
+        Returns:
+            True if the process finishes successfully and hot start results pass the criteria.
+        Raise:
+            TestFailure: fail TTFF test criteria.
+        """
+
+        # Start and set GNSS simulator
+        self.start_and_set_gnss_simulator_power()
+
+        # Start 1st time cold start to obtain ephemeris
+        process_gnss_by_gtw_gpstool(self.dut, self.test_types['cs'].criteria)
+
+        self.hot_start_gnss_power_sweep(self.gnss_simulator_power_level,
+                                        self.sa_sensitivity,
+                                        self.gnss_pwr_lvl_offset, first_wait)
+
+        return True
+
+    def start_gnss_and_wait(self, wait=60):
+        """
+        Enable GNSS and wait for the specified time so GNSS can gather
+        enough satellite information to ensure stable test results.
+
+        Args:
+            wait: wait time between power sweep.
+                Type, int.
+                Default, 60.
+        """
+        # Create log path for waiting section logs of GPStool.
+        gnss_wait_log_dir = os.path.join(self.gnss_log_path, 'GNSS_wait')
+
+        # Enable GNSS to receive satellites' signals for "wait_between_pwr" seconds.
+        self.log.info('Enable GNSS for searching satellites')
+        start_gnss_by_gtw_gpstool(self.dut, state=True)
+        self.log.info('Wait for {} seconds'.format(str(wait)))
+        sleep(wait)
+
+        # Stop GNSS and pull the logs.
+        start_gnss_by_gtw_gpstool(self.dut, state=False)
+        get_gpstool_logs(self.dut, gnss_wait_log_dir, False)
+
+    def cell_power_sweep(self):
+        """
+        Linearly sweep the cellular power level, running a GNSS hot start at each
+        level with cellular coexistence and checking whether it passes the hot
+        start criteria.
+
+        Returns: final power level of cellular power
+        """
+        # Get parameters from user params.
+        ttff_iteration = self.user_params.get('ttff_iteration', 25)
+        wait_before_test = self.user_params.get('wait_before_test', 60)
+        wait_between_pwr = self.user_params.get('wait_between_pwr', 60)
+        power_th = self.start_pwr
+
+        # Generate the power sweep list.
+        power_search_ls = range_wi_end(self.dut, self.start_pwr, self.stop_pwr,
+                                       self.offset)
+
+        # Set GNSS simulator power level.
+        self.gnss_simulator.set_power(self.sa_sensitivity)
+
+        # Create gnss log folders for init and cellular sweep
+        gnss_init_log_dir = os.path.join(self.gnss_log_path, 'GNSS_init')
+
+        # Pull all exist GPStool logs into GNSS_init folder
+        get_gpstool_logs(self.dut, gnss_init_log_dir, False)
+
+        if power_search_ls:
+            # Run the cellular and GNSS coexistence test item.
+            for i, pwr_lvl in enumerate(power_search_ls):
+                self.log.info('Cellular power sweep loop: {}'.format(int(i)))
+                self.log.info('Cellular target power: {}'.format(int(pwr_lvl)))
+
+                # Enable GNSS to receive satellites' signals for "wait_between_pwr" seconds.
+                # Wait more time before 1st power level
+                if i == 0:
+                    wait = wait_before_test
+                else:
+                    wait = wait_between_pwr
+                self.start_gnss_and_wait(wait)
+
+                # Set cellular Tx power level.
+                eecoex_cmd = self.eecoex_func.format(str(pwr_lvl))
+                eecoex_cmd_file_str = eecoex_cmd.replace(',', '_')
+                excute_eecoexer_function(self.dut, eecoex_cmd)
+
+                # Get the last power level that can pass hot start ttff/ffpe spec.
+                if self.gnss_hot_start_ttff_ffpe_test(ttff_iteration, True,
+                                                      eecoex_cmd_file_str):
+                    if i + 1 == len(power_search_ls):
+                        power_th = pwr_lvl
+                else:
+                    if i == 0:
+                        power_th = self.start_pwr
+                    else:
+                        power_th = power_search_ls[i - 1]
+
+                # Stop cellular Tx after a test cycle.
+                self.stop_cell_tx()
+
+        else:
+            # Run the stand alone test item.
+            self.start_gnss_and_wait(wait_between_pwr)
+
+            eecoex_cmd_file_str = 'no_cellular_coex'
+            self.gnss_hot_start_ttff_ffpe_test(ttff_iteration, True,
+                                               eecoex_cmd_file_str)
+
+        self.log.info('The GNSS WWAN coex cellular Tx power is {}'.format(
+            str(power_th)))
+
+        return power_th
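
A hypothetical concrete test class would set the EEcoexer command template and sweep bounds defined in __init__ before invoking the sweep; the class name, test name and command string below are illustrative and not part of this change:

    class GnssLteTddPc3BlankingTest(GnssBlankingBase):

        def test_lte_tdd_pc3_blanking_sweep(self):
            # EEcoexer command template; cell_power_sweep() fills in the power
            # level via self.eecoex_func.format(str(pwr_lvl)).
            self.eecoex_func = 'CELLR,<band/channel parameters>,{}'
            (self.start_pwr, self.stop_pwr,
             self.offset) = self.lte_tdd_pc3_sweep_params
            power_th = self.cell_power_sweep()
            self.log.info('Passing cellular Tx power threshold: %s', power_th)
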
diff --git a/acts_tests/acts_contrib/test_utils/gnss/LabTtffTestBase.py b/acts_tests/acts_contrib/test_utils/gnss/LabTtffTestBase.py
new file mode 100644
index 0000000..6a6bd5d
--- /dev/null
+++ b/acts_tests/acts_contrib/test_utils/gnss/LabTtffTestBase.py
@@ -0,0 +1,349 @@
+#!/usr/bin/env python3
+#
+#   Copyright 2020 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the 'License');
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an 'AS IS' BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+import os
+import time
+import glob
+import errno
+from collections import namedtuple
+from pandas import DataFrame
+from acts import utils
+from acts import signals
+from acts.base_test import BaseTestClass
+from acts.controllers.gnss_lib import GnssSimulator
+from acts.context import get_current_context
+from acts_contrib.test_utils.gnss import dut_log_test_utils as diaglog
+from acts_contrib.test_utils.gnss import gnss_test_utils as gutils
+from acts_contrib.test_utils.gnss import gnss_testlog_utils as glogutils
+from acts_contrib.test_utils.gnss.gnss_defines import DEVICE_GPSLOG_FOLDER
+from acts_contrib.test_utils.gnss.gnss_defines import GPS_PKG_NAME
+from acts_contrib.test_utils.gnss.gnss_defines import BCM_GPS_XML_PATH
+
+
+class LabTtffTestBase(BaseTestClass):
+    """ LAB TTFF Tests Base Class"""
+    GTW_GPSTOOL_APP = 'gtw_gpstool_apk'
+    GNSS_SIMULATOR_KEY = 'gnss_simulator'
+    GNSS_SIMULATOR_IP_KEY = 'gnss_simulator_ip'
+    GNSS_SIMULATOR_PORT_KEY = 'gnss_simulator_port'
+    GNSS_SIMULATOR_PORT_CTRL_KEY = 'gnss_simulator_port_ctrl'
+    GNSS_SIMULATOR_SCENARIO_KEY = 'gnss_simulator_scenario'
+    GNSS_SIMULATOR_POWER_LEVEL_KEY = 'gnss_simulator_power_level'
+    CUSTOM_FILES_KEY = 'custom_files'
+    CSTTFF_CRITERIA = 'cs_criteria'
+    HSTTFF_CRITERIA = 'hs_criteria'
+    WSTTFF_CRITERIA = 'ws_criteria'
+    CSTTFF_PECRITERIA = 'cs_ttff_pecriteria'
+    HSTTFF_PECRITERIA = 'hs_ttff_pecriteria'
+    WSTTFF_PECRITERIA = 'ws_ttff_pecriteria'
+    TTFF_ITERATION = 'ttff_iteration'
+    SIMULATOR_LOCATION = 'simulator_location'
+    DIAG_OPTION = 'diag_option'
+
+    def __init__(self, controllers):
+        """ Initializes class attributes. """
+
+        super().__init__(controllers)
+
+        self.dut = None
+        self.gnss_simulator = None
+        self.rockbottom_script = None
+        self.gnss_log_path = self.log_path
+        self.gps_xml_bk_path = BCM_GPS_XML_PATH + '.bk'
+
+    def setup_class(self):
+        super().setup_class()
+
+        req_params = [
+            self.GNSS_SIMULATOR_KEY, self.GNSS_SIMULATOR_IP_KEY,
+            self.GNSS_SIMULATOR_PORT_KEY, self.GNSS_SIMULATOR_SCENARIO_KEY,
+            self.GNSS_SIMULATOR_POWER_LEVEL_KEY, self.CSTTFF_CRITERIA,
+            self.HSTTFF_CRITERIA, self.WSTTFF_CRITERIA, self.TTFF_ITERATION,
+            self.SIMULATOR_LOCATION, self.DIAG_OPTION
+        ]
+
+        self.unpack_userparams(req_param_names=req_params)
+        self.dut = self.android_devices[0]
+        self.gnss_simulator_scenario = self.user_params[
+            self.GNSS_SIMULATOR_SCENARIO_KEY]
+        self.gnss_simulator_power_level = self.user_params[
+            self.GNSS_SIMULATOR_POWER_LEVEL_KEY]
+        self.gtw_gpstool_app = self.user_params[self.GTW_GPSTOOL_APP]
+        custom_files = self.user_params.get(self.CUSTOM_FILES_KEY, [])
+        self.cs_ttff_criteria = self.user_params.get(self.CSTTFF_CRITERIA, [])
+        self.hs_ttff_criteria = self.user_params.get(self.HSTTFF_CRITERIA, [])
+        self.ws_ttff_criteria = self.user_params.get(self.WSTTFF_CRITERIA, [])
+        self.cs_ttff_pecriteria = self.user_params.get(self.CSTTFF_PECRITERIA,
+                                                       [])
+        self.hs_ttff_pecriteria = self.user_params.get(self.HSTTFF_PECRITERIA,
+                                                       [])
+        self.ws_ttff_pecriteria = self.user_params.get(self.WSTTFF_PECRITERIA,
+                                                       [])
+        self.ttff_iteration = self.user_params.get(self.TTFF_ITERATION, [])
+        self.simulator_location = self.user_params.get(self.SIMULATOR_LOCATION,
+                                                       [])
+        self.diag_option = self.user_params.get(self.DIAG_OPTION, [])
+
+        # Create gnss_simulator instance
+        gnss_simulator_key = self.user_params[self.GNSS_SIMULATOR_KEY]
+        gnss_simulator_ip = self.user_params[self.GNSS_SIMULATOR_IP_KEY]
+        gnss_simulator_port = self.user_params[self.GNSS_SIMULATOR_PORT_KEY]
+        if gnss_simulator_key == 'gss7000':
+            gnss_simulator_port_ctrl = self.user_params[
+                self.GNSS_SIMULATOR_PORT_CTRL_KEY]
+        else:
+            gnss_simulator_port_ctrl = None
+        self.gnss_simulator = GnssSimulator.AbstractGnssSimulator(
+            gnss_simulator_key, gnss_simulator_ip, gnss_simulator_port,
+            gnss_simulator_port_ctrl)
+
+        test_type = namedtuple('Type', ['command', 'criteria'])
+        self.test_types = {
+            'cs': test_type('Cold Start', self.cs_ttff_criteria),
+            'ws': test_type('Warm Start', self.ws_ttff_criteria),
+            'hs': test_type('Hot Start', self.hs_ttff_criteria)
+        }
+
+        # Unpack the rockbottom script file if it's available.
+        for file in custom_files:
+            if 'rockbottom_' + self.dut.model in file:
+                self.rockbottom_script = file
+                break
+
+    def setup_test(self):
+
+        self.clear_gps_log()
+        self.gnss_simulator.stop_scenario()
+        self.gnss_simulator.close()
+        if self.rockbottom_script:
+            self.log.info('Running rockbottom script for this device ' +
+                          self.dut.model)
+            self.dut_rockbottom()
+        else:
+            self.log.info('Not running rockbottom for this device ' +
+                          self.dut.model)
+
+        utils.set_location_service(self.dut, True)
+        gutils.reinstall_package_apk(self.dut, GPS_PKG_NAME,
+                                     self.gtw_gpstool_app)
+
+        # For BCM DUTs, delete gldata.sto and set IgnoreRomAlm="true" based on b/196936791#comment20
+        if self.diag_option == "BCM":
+            gutils.remount_device(self.dut)
+            # Backup gps.xml
+            copy_cmd = "cp {} {}".format(BCM_GPS_XML_PATH, self.gps_xml_bk_path)
+            self.dut.adb.shell(copy_cmd)
+            gutils.delete_bcm_nvmem_sto_file(self.dut)
+            gutils.bcm_gps_ignore_rom_alm(self.dut)
+            # Reboot DUT to apply the setting
+            gutils.reboot(self.dut)
+        self.gnss_simulator.connect()
+
+    def dut_rockbottom(self):
+        """
+        Set the dut to rockbottom state
+
+        """
+        # The rockbottom script might include a device reboot, so it is
+        # necessary to stop SL4A during its execution.
+        self.dut.stop_services()
+        self.log.info('Executing rockbottom script for ' + self.dut.model)
+        os.chmod(self.rockbottom_script, 0o777)
+        os.system('{} {}'.format(self.rockbottom_script, self.dut.serial))
+        # Make sure the DUT is in root mode after coming back
+        self.dut.root_adb()
+        # Restart SL4A
+        self.dut.start_services()
+
+    def teardown_test(self):
+        """Teardown settings for the test class"""
+        super().teardown_test()
+        # Restore the gps.xml every time after the test.
+        if self.diag_option == "BCM":
+            # Restore gps.xml
+            rm_cmd = "rm -rf {}".format(BCM_GPS_XML_PATH)
+            restore_cmd = "mv {} {}".format(self.gps_xml_bk_path,
+                                            BCM_GPS_XML_PATH)
+            self.dut.adb.shell(rm_cmd)
+            self.dut.adb.shell(restore_cmd)
+
+    def teardown_class(self):
+        """ Executed after completing all selected test cases."""
+        self.clear_gps_log()
+        if self.gnss_simulator:
+            self.gnss_simulator.stop_scenario()
+            self.gnss_simulator.close()
+
+    def start_and_set_gnss_simulator_power(self):
+        """
+        Start GNSS simulator scenario and set power level.
+
+        """
+
+        self.gnss_simulator.start_scenario(self.gnss_simulator_scenario)
+        time.sleep(25)
+        self.gnss_simulator.set_power(self.gnss_simulator_power_level)
+
+    def get_and_verify_ttff(self, mode):
+        """Retrieve ttff with designate mode.
+
+            Args:
+                mode: A string for identify gnss test mode.
+        """
+        if mode not in self.test_types:
+            raise signals.TestError('Unrecognized mode %s' % mode)
+        test_type = self.test_types.get(mode)
+
+        if mode != 'cs':
+            wait_time = 900
+        else:
+            wait_time = 300
+
+        gutils.process_gnss_by_gtw_gpstool(self.dut,
+                                           self.test_types['cs'].criteria)
+        begin_time = gutils.get_current_epoch_time()
+        gutils.start_ttff_by_gtw_gpstool(self.dut,
+                                         ttff_mode=mode,
+                                         iteration=self.ttff_iteration,
+                                         raninterval=True,
+                                         hot_warm_sleep=wait_time)
+        # Wearables take a little longer to update the TTFF info, so wait
+        # as a workaround for the wearable timing issue.
+        if gutils.is_device_wearable(self.dut):
+            time.sleep(20)
+
+        ttff_data = gutils.process_ttff_by_gtw_gpstool(self.dut, begin_time,
+                                                       self.simulator_location)
+
+        # Create folder for GTW GPStool's log
+        gps_log_path = os.path.join(self.gnss_log_path, 'GPSLogs')
+        os.makedirs(gps_log_path, exist_ok=True)
+
+        self.dut.adb.pull("{} {}".format(DEVICE_GPSLOG_FOLDER, gps_log_path))
+
+        gps_api_log = glob.glob(gps_log_path + '/*/GNSS_*.txt')
+        ttff_loop_log = glob.glob(gps_log_path +
+                                  '/*/GPS_{}_*.txt'.format(mode.upper()))
+
+        if not gps_api_log or not ttff_loop_log:
+            raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
+                                    gps_log_path)
+
+        df = DataFrame(glogutils.parse_gpstool_ttfflog_to_df(gps_api_log[0]))
+
+        ttff_dict = {}
+        for i in ttff_data:
+            d = ttff_data[i]._asdict()
+            ttff_dict[i] = dict(d)
+
+        ttff_time = []
+        ttff_pe = []
+        ttff_haccu = []
+        for i in ttff_dict.keys():
+            ttff_time.append(ttff_dict[i]['ttff_sec'])
+            ttff_pe.append(ttff_dict[i]['ttff_pe'])
+            ttff_haccu.append(ttff_dict[i]['ttff_haccu'])
+        df['ttff_sec'] = ttff_time
+        df['ttff_pe'] = ttff_pe
+        df['ttff_haccu'] = ttff_haccu
+        df.to_json(gps_log_path + '/gps_log.json', orient='table')
+        result = gutils.check_ttff_data(self.dut,
+                                        ttff_data,
+                                        ttff_mode=test_type.command,
+                                        criteria=test_type.criteria)
+        if not result:
+            raise signals.TestFailure('%s TTFF fails to reach '
+                                      'designated criteria' % test_type.command)
+        return ttff_data
+
+    def verify_pe(self, mode):
+        """
+        Verify TTFF position error with the designated mode.
+
+        Args:
+             mode: A string identifying the GNSS test mode.
+        """
+
+        ffpe_type = namedtuple('Type', ['command', 'pecriteria'])
+        ffpe_types = {
+            'cs': ffpe_type('Cold Start', self.cs_ttff_pecriteria),
+            'ws': ffpe_type('Warm Start', self.ws_ttff_pecriteria),
+            'hs': ffpe_type('Hot Start', self.hs_ttff_pecriteria)
+        }
+
+        if mode not in ffpe_types:
+            raise signals.TestError('Unrecognized mode %s' % mode)
+        test_type = ffpe_types.get(mode)
+
+        ttff_data = self.get_and_verify_ttff(mode)
+        result = gutils.check_ttff_pe(self.dut,
+                                      ttff_data,
+                                      ttff_mode=test_type.command,
+                                      pe_criteria=test_type.pecriteria)
+        if not result:
+            raise signals.TestFailure('%s TTFF fails to reach '
+                                      'designated criteria' % test_type.command)
+        return ttff_data
+
+    def clear_gps_log(self):
+        """
+        Delete the existing GPS GTW Log from DUT.
+
+        """
+        self.dut.adb.shell("rm -rf {}".format(DEVICE_GPSLOG_FOLDER))
+
+    def gnss_ttff_ffpe(self, mode, sub_context_path=''):
+        """
+        Base TTFF and FFPE function.
+            Args:
+                mode: Set the TTFF mode for testing. Definitions are as below.
+                      cs(cold start), ws(warm start), hs(hot start)
+                sub_context_path: Set a specific log path for ttff_ffpe
+        """
+        # Create log file path
+        full_output_path = get_current_context().get_full_output_path()
+        self.gnss_log_path = os.path.join(full_output_path, sub_context_path)
+        os.makedirs(self.gnss_log_path, exist_ok=True)
+        self.log.debug('Create log path: {}'.format(self.gnss_log_path))
+
+        # Start and set GNSS simulator
+        self.start_and_set_gnss_simulator_power()
+
+        # Start GNSS chip log
+        if self.diag_option == "QCOM":
+            diaglog.start_diagmdlog_background(self.dut, maskfile=self.maskfile)
+        else:
+            gutils.start_pixel_logger(self.dut)
+
+        # Start verifying TTFF and FFPE
+        self.verify_pe(mode)
+
+        # Set gnss_vendor_log_path based on GNSS solution vendor
+        gnss_vendor_log_path = os.path.join(self.gnss_log_path,
+                                            self.diag_option)
+        os.makedirs(gnss_vendor_log_path, exist_ok=True)
+
+        # Stop GNSS chip log and pull the logs to local file system
+        if self.diag_option == "QCOM":
+            diaglog.stop_background_diagmdlog(self.dut,
+                                              gnss_vendor_log_path,
+                                              keep_logs=False)
+        else:
+            gutils.stop_pixel_logger(self.dut)
+            self.log.info('Getting Pixel BCM Log!')
+            diaglog.get_pixellogger_bcm_log(self.dut,
+                                            gnss_vendor_log_path,
+                                            keep_logs=False)
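As a usage illustration of the new base class (the subclass and test names below are hypothetical and not part of this change), a lab TTFF test would typically subclass LabTtffTestBase and call gnss_ttff_ffpe with the desired mode; the required userparams such as gnss_simulator and the TTFF criteria come from the ACTS config unpacked in setup_class.

    from acts_contrib.test_utils.gnss.LabTtffTestBase import LabTtffTestBase


    class LabTtffColdStartTest(LabTtffTestBase):
        """Hypothetical example test built on LabTtffTestBase."""

        def test_gnss_cold_ttff_ffpe(self):
            # 'cs' selects cold start; chip logs and GPSTool logs are pulled
            # under the current test context output path, in a 'cs' subfolder.
            self.gnss_ttff_ffpe('cs', sub_context_path='cs')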
diff --git a/acts_tests/acts_contrib/test_utils/gnss/dut_log_test_utils.py b/acts_tests/acts_contrib/test_utils/gnss/dut_log_test_utils.py
index a685b65..cba71f1 100644
--- a/acts_tests/acts_contrib/test_utils/gnss/dut_log_test_utils.py
+++ b/acts_tests/acts_contrib/test_utils/gnss/dut_log_test_utils.py
@@ -18,12 +18,14 @@
 import time
 import errno
 
+
 DEVICE_CFG_FOLDER = "/data/vendor/radio/diag_logs/cfg/"
 DEVICE_DIAGMDLOG_FOLDER = "/data/vendor/radio/diag_logs/logs/"
 MDLOG_SETTLING_TIME = 2
 MDLOG_PROCESS_KILL_TIME = 3
 NOHUP_CMD = "nohup diag_mdlog -f {} -o {} -s 100 -c &> /dev/null &"
 DEVICE_GPSLOG_FOLDER = '/sdcard/Android/data/com.android.gpstool/files/'
+DEVICE_PIXEL_LOGGER_FOLDER = '/sdcard/Android/data/com.android.pixellogger/files/logs/gps/'
 
 
 def find_device_qxdm_log_mask(ad, maskfile):
@@ -169,9 +171,30 @@
     """
 
     gps_log_path = os.path.join(local_logpath, 'GPSLogs')
+    os.makedirs(gps_log_path, exist_ok=True)
     ad.adb.pull("{} {}".format(DEVICE_GPSLOG_FOLDER, gps_log_path))
     ad.log.debug("gpstool logs are pulled from device")
 
     if not keep_logs:
-        ad.adb.shell("rm -rf " + DEVICE_GPSLOG_FOLDER + "*.*")
-        ad.log.debug("gpstool logs are deleted from device")
\ No newline at end of file
+        gpstool_log_path = os.path.join(DEVICE_GPSLOG_FOLDER, "*")
+        ad.adb.shell("rm -rf " + gpstool_log_path)
+        ad.log.debug("gpstool logs are deleted from device")
+
+def get_pixellogger_bcm_log(ad, local_logpath, keep_logs=True):
+    """
+
+    Pulls BCM Logs from android device
+
+       Args:
+           ad: the target android device, AndroidDevice object
+           local_logpath: Local file path to pull the gpstool logs
+           keep_logs: False, delete log files from the gpstool log path
+    """
+
+    ad.adb.pull("{} {}".format(DEVICE_PIXEL_LOGGER_FOLDER, local_logpath))
+    ad.log.debug("pixellogger logs are pulled from device")
+
+    if not keep_logs:
+        bcm_log_path = os.path.join(DEVICE_PIXEL_LOGGER_FOLDER, "*")
+        ad.adb.shell("rm -rf " + bcm_log_path)
+        ad.log.debug("pixellogger logs are deleted from device")
\ No newline at end of file
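A minimal usage sketch for the new helper (the collect_bcm_logs wrapper and the local path are assumptions for illustration): stop the BRCM PixelLogger first so its zip file is finalized, then pull the logs and clean the device-side folder.

    import os

    from acts_contrib.test_utils.gnss import dut_log_test_utils as diaglog
    from acts_contrib.test_utils.gnss import gnss_test_utils as gutils


    def collect_bcm_logs(ad, local_logpath="/tmp/bcm_gps_logs"):
        # Hypothetical wrapper: stop PixelLogger, then pull BCM GPS logs.
        gutils.stop_pixel_logger(ad)
        os.makedirs(local_logpath, exist_ok=True)
        diaglog.get_pixellogger_bcm_log(ad, local_logpath, keep_logs=False)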
diff --git a/acts_tests/acts_contrib/test_utils/gnss/gnss_defines.py b/acts_tests/acts_contrib/test_utils/gnss/gnss_defines.py
new file mode 100644
index 0000000..24eef0f
--- /dev/null
+++ b/acts_tests/acts_contrib/test_utils/gnss/gnss_defines.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python3
+#
+#   Copyright 2021 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the 'License');
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an 'AS IS' BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+DEVICE_GPSLOG_FOLDER = '/sdcard/Android/data/com.android.gpstool/files/'
+GPS_PKG_NAME = 'com.android.gpstool'
+BCM_GPS_XML_PATH = '/vendor/etc/gnss/gps.xml'
+BCM_NVME_STO_PATH = '/data/vendor/gps/gldata.sto'
\ No newline at end of file
diff --git a/acts_tests/acts_contrib/test_utils/gnss/gnss_test_utils.py b/acts_tests/acts_contrib/test_utils/gnss/gnss_test_utils.py
index 7b5c841..5efa817 100644
--- a/acts_tests/acts_contrib/test_utils/gnss/gnss_test_utils.py
+++ b/acts_tests/acts_contrib/test_utils/gnss/gnss_test_utils.py
@@ -22,7 +22,10 @@
 import fnmatch
 import posixpath
 import tempfile
+import zipfile
 from collections import namedtuple
+from datetime import datetime
+from xml.etree import ElementTree
 
 from acts import utils
 from acts import asserts
@@ -39,6 +42,8 @@
 from acts_contrib.test_utils.instrumentation.device.command.instrumentation_command_builder import InstrumentationTestCommandBuilder
 from acts.utils import get_current_epoch_time
 from acts.utils import epoch_to_human_time
+from acts_contrib.test_utils.gnss.gnss_defines import BCM_GPS_XML_PATH
+from acts_contrib.test_utils.gnss.gnss_defines import BCM_NVME_STO_PATH
 
 WifiEnums = wutils.WifiEnums
 PULL_TIMEOUT = 300
@@ -47,7 +52,7 @@
 QXDM_MASKS = ["GPS.cfg", "GPS-general.cfg", "default.cfg"]
 TTFF_REPORT = namedtuple(
     "TTFF_REPORT", "utc_time ttff_loop ttff_sec ttff_pe ttff_ant_cn "
-                   "ttff_base_cn")
+                   "ttff_base_cn ttff_haccu")
 TRACK_REPORT = namedtuple(
     "TRACK_REPORT", "l5flag pe ant_top4cn ant_cn base_top4cn base_cn")
 LOCAL_PROP_FILE_CONTENTS = """\
@@ -89,6 +94,11 @@
 NORMAL_PSDS_SERVER="http://"
 REALTIME_PSDS_SERVER="http://"
 """
+DISABLE_LTO_FILE_CONTENTS_R = """\
+XTRA_SERVER_1="http://"
+XTRA_SERVER_2="http://"
+XTRA_SERVER_3="http://"
+"""
 
 
 class GnssTestUtilsError(Exception):
@@ -121,7 +131,7 @@
     ad.log.info("Reboot device to make changes take effect.")
     ad.reboot()
     ad.unlock_screen(password=None)
-    if not int(ad.adb.shell("settings get global mobile_data")) == 1:
+    if not is_mobile_data_on(ad):
         set_mobile_data(ad, True)
     utils.sync_device_time(ad)
 
@@ -185,7 +195,7 @@
                "--es package com.google.android.location --es user \* "
                "--esa flags %s --esa values %s --esa types %s "
                "com.google.android.gms" % (flag, value, type))
-        ad.adb.shell(cmd)
+        ad.adb.shell(cmd, ignore_status=True)
     ad.adb.shell("am force-stop com.google.android.gms")
     ad.adb.shell("am broadcast -a com.google.android.gms.INITIALIZE")
 
@@ -211,7 +221,9 @@
     remount_device(ad)
     ad.log.info("Enable SUPL mode.")
     ad.adb.shell("echo -e '\nSUPL_MODE=1' >> /etc/gps_debug.conf")
-    if not check_chipset_vendor_by_qualcomm(ad):
+    if is_device_wearable(ad):
+        lto_mode_wearable(ad, True)
+    elif not check_chipset_vendor_by_qualcomm(ad):
         lto_mode(ad, True)
     else:
         reboot(ad)
@@ -226,7 +238,9 @@
     remount_device(ad)
     ad.log.info("Disable SUPL mode.")
     ad.adb.shell("echo -e '\nSUPL_MODE=0' >> /etc/gps_debug.conf")
-    if not check_chipset_vendor_by_qualcomm(ad):
+    if is_device_wearable(ad):
+        lto_mode_wearable(ad, True)
+    elif not check_chipset_vendor_by_qualcomm(ad):
         lto_mode(ad, True)
     else:
         reboot(ad)
@@ -239,7 +253,9 @@
         ad: An AndroidDevice object.
     """
     ad.root_adb()
-    if check_chipset_vendor_by_qualcomm(ad):
+    if is_device_wearable(ad):
+        lto_mode_wearable(ad, False)
+    elif check_chipset_vendor_by_qualcomm(ad):
         ad.log.info("Disable XTRA-daemon until next reboot.")
         ad.adb.shell("killall xtra-daemon", ignore_status=True)
     else:
@@ -268,10 +284,14 @@
     """
     enable_gnss_verbose_logging(ad)
     enable_compact_and_particle_fusion_log(ad)
+    prepare_gps_overlay(ad)
     if check_chipset_vendor_by_qualcomm(ad):
         disable_xtra_throttle(ad)
     enable_supl_mode(ad)
-    ad.adb.shell("settings put system screen_off_timeout 1800000")
+    if is_device_wearable(ad):
+        ad.adb.shell("settings put global stay_on_while_plugged_in 7")
+    else:
+        ad.adb.shell("settings put system screen_off_timeout 1800000")
     wutils.wifi_toggle_state(ad, False)
     ad.log.info("Setting Bluetooth state to False")
     ad.droid.bluetoothToggleState(False)
@@ -280,6 +300,61 @@
     disable_private_dns_mode(ad)
     reboot(ad)
     init_gtw_gpstool(ad)
+    if not is_mobile_data_on(ad):
+        set_mobile_data(ad, True)
+
+
+def prepare_gps_overlay(ad):
+    """Set pixellogger gps log mask to
+    resolve gps logs unreplayable from brcm vendor
+    """
+    if not check_chipset_vendor_by_qualcomm(ad):
+        overlay_file = "/data/vendor/gps/overlay/gps_overlay.xml"
+        xml_file = generate_gps_overlay_xml(ad)
+        try:
+            ad.log.info("Push gps_overlay to device")
+            ad.adb.push(xml_file, overlay_file)
+            ad.adb.shell(f"chmod 777 {overlay_file}")
+        finally:
+            xml_folder = os.path.abspath(os.path.join(xml_file, os.pardir))
+            shutil.rmtree(xml_folder)
+
+
+def generate_gps_overlay_xml(ad):
+    """For r11 devices, the overlay setting is 'Replayable default'
+    For other brcm devices, the setting is 'Replayable debug'
+
+    Returns:
+        path to the xml file
+    """
+    root_attrib = {
+        "xmlns": "http://www.glpals.com/",
+        "xmlns:xsi": "http://www.w3.org/2001/XMLSchema-instance",
+        "xsi:schemaLocation": "http://www.glpals.com/ glconfig.xsd",
+    }
+    sub_attrib = {"EnableOnChipStopNotification": "true"}
+    if not is_device_wearable(ad):
+        sub_attrib["LogPriMask"] = "LOG_DEBUG"
+        sub_attrib["LogFacMask"] = "LOG_GLLIO | LOG_GLLAPI | LOG_NMEA | LOG_RAWDATA"
+        sub_attrib["OnChipLogPriMask"] = "LOG_DEBUG"
+        sub_attrib["OnChipLogFacMask"] = "LOG_GLLIO | LOG_GLLAPI | LOG_NMEA | LOG_RAWDATA"
+
+    temp_path = tempfile.mkdtemp()
+    xml_file = os.path.join(temp_path, "gps_overlay.xml")
+
+    root = ElementTree.Element('glgps')
+    for key, value in root_attrib.items():
+        root.attrib[key] = value
+
+    ad.log.debug("Sub attrib is %s", sub_attrib)
+
+    sub = ElementTree.SubElement(root, 'gll')
+    for key, value in sub_attrib.items():
+        sub.attrib[key] = value
+
+    xml = ElementTree.ElementTree(root)
+    xml.write(xml_file, xml_declaration=True, encoding="utf-8", method="xml")
+    return xml_file
 
 
 def connect_to_wifi_network(ad, network):
@@ -324,6 +399,7 @@
     """
     remount_device(ad)
     utils.set_location_service(ad, True)
+    ad.adb.shell("cmd location set-location-enabled true")
     location_mode = int(ad.adb.shell("settings get secure location_mode"))
     ad.log.info("Current Location Mode >> %d" % location_mode)
     if location_mode != 3:
@@ -356,7 +432,7 @@
     reboot(ad)
 
 
-def get_gnss_qxdm_log(ad, qdb_path):
+def get_gnss_qxdm_log(ad, qdb_path=None):
     """Get /storage/emulated/0/Android/data/com.android.gpstool/files and
     /data/vendor/radio/diag_logs/logs for test item.
 
@@ -370,16 +446,17 @@
     gnss_log_path = posixpath.join(log_path, gnss_log_name)
     os.makedirs(gnss_log_path, exist_ok=True)
     ad.log.info("Pull GnssStatus Log to %s" % gnss_log_path)
-    ad.adb.pull("%s %s" % (GNSSSTATUS_LOG_PATH+".", gnss_log_path),
+    ad.adb.pull("%s %s" % (GNSSSTATUS_LOG_PATH + ".", gnss_log_path),
                 timeout=PULL_TIMEOUT, ignore_status=True)
     shutil.make_archive(gnss_log_path, "zip", gnss_log_path)
-    shutil.rmtree(gnss_log_path)
+    shutil.rmtree(gnss_log_path, ignore_errors=True)
     if check_chipset_vendor_by_qualcomm(ad):
         output_path = (
-            "/sdcard/Android/data/com.android.pixellogger/files/logs/diag_logs")
+            "/sdcard/Android/data/com.android.pixellogger/files/logs/"
+            "diag_logs/.")
     else:
         output_path = (
-            "/sdcard/Android/data/com.android.pixellogger/files/logs/gps/")
+            "/sdcard/Android/data/com.android.pixellogger/files/logs/gps/.")
     qxdm_log_name = "PixelLogger_%s_%s" % (ad.model, ad.serial)
     qxdm_log_path = posixpath.join(log_path, qxdm_log_name)
     os.makedirs(qxdm_log_path, exist_ok=True)
@@ -395,7 +472,7 @@
                 continue
             break
     shutil.make_archive(qxdm_log_path, "zip", qxdm_log_path)
-    shutil.rmtree(qxdm_log_path)
+    shutil.rmtree(qxdm_log_path, ignore_errors=True)
 
 
 def set_mobile_data(ad, state):
@@ -407,19 +484,27 @@
     """
     ad.root_adb()
     if state:
-        ad.log.info("Enable mobile data.")
-        ad.adb.shell("svc data enable")
+        if is_device_wearable(ad):
+            ad.log.info("Enable wearable mobile data.")
+            ad.adb.shell("settings put global cell_on 1")
+        else:
+            ad.log.info("Enable mobile data via RPC call.")
+            ad.droid.telephonyToggleDataConnection(True)
     else:
-        ad.log.info("Disable mobile data.")
-        ad.adb.shell("svc data disable")
+        if is_device_wearable(ad):
+            ad.log.info("Disable wearable mobile data.")
+            ad.adb.shell("settings put global cell_on 0")
+        else:
+            ad.log.info("Disable mobile data via RPC call.")
+            ad.droid.telephonyToggleDataConnection(False)
     time.sleep(5)
-    out = int(ad.adb.shell("settings get global mobile_data"))
-    if state and out == 1:
-        ad.log.info("Mobile data is enabled and set to %d" % out)
-    elif not state and out == 0:
-        ad.log.info("Mobile data is disabled and set to %d" % out)
+    ret_val = is_mobile_data_on(ad)
+    if state and ret_val:
+        ad.log.info("Mobile data is enabled and set to %s" % ret_val)
+    elif not state and not ret_val:
+        ad.log.info("Mobile data is disabled and set to %s" % ret_val)
     else:
-        ad.log.error("Mobile data is at unknown state and set to %d" % out)
+        ad.log.error("Mobile data is at unknown state and set to %s" % ret_val)
 
 
 def gnss_trigger_modem_ssr_by_adb(ad, dwelltime=60):
@@ -504,6 +589,11 @@
             ad.log.info("XTRA downloaded and injected successfully.")
             return True
         ad.log.error("XTRA downloaded FAIL.")
+    elif is_device_wearable(ad):
+        lto_results = ad.adb.shell("ls -al /data/vendor/gps/lto*")
+        if "lto2.dat" in lto_results:
+            ad.log.info("LTO downloaded and injected successfully.")
+            return True
     else:
         lto_results = ad.search_logcat("GnssPsdsAidl: injectPsdsData: "
                                        "psdsType: 1", begin_time)
@@ -525,11 +615,10 @@
     Returns:
         The temp path of pulled apk.
     """
-    apk_path = None
     out = ad.adb.shell("pm path %s" % package_name)
     result = re.search(r"package:(.*)", out)
     if not result:
-        tutils.abort_all_tests(ad.log, "Couldn't find apk of %s" % package_name)
+        raise signals.TestError("Couldn't find apk of %s" % package_name)
     else:
         apk_source = result.group(1)
         ad.log.info("Get apk of %s from %s" % (package_name, apk_source))
@@ -593,9 +682,10 @@
     remount_device(ad)
     gpstool_path = pull_package_apk(ad, "com.android.gpstool")
     reinstall_package_apk(ad, "com.android.gpstool", gpstool_path)
+    shutil.rmtree(gpstool_path, ignore_errors=True)
 
 
-def fastboot_factory_reset(ad):
+def fastboot_factory_reset(ad, state=True):
     """Factory reset the device in fastboot mode.
        Pull sl4a apk from device. Terminate all sl4a sessions,
        Reboot the device to bootloader,
@@ -605,23 +695,24 @@
 
     Args:
         ad: An AndroidDevice object.
+        state: True to exit the setup wizard after reset; False to skip it.
 
     Returns:
         True if factory reset process complete.
     """
     status = True
-    skip_setup_wizard = True
+    mds_path = ""
+    gnss_cfg_file = ""
     gnss_cfg_path = "/vendor/etc/mdlog"
     default_gnss_cfg = "/vendor/etc/mdlog/DEFAULT+SECURITY+FULLDPL+GPS.cfg"
     sl4a_path = pull_package_apk(ad, SL4A_APK_NAME)
     gpstool_path = pull_package_apk(ad, "com.android.gpstool")
-    mds_path = pull_package_apk(ad, "com.google.mdstest")
     if check_chipset_vendor_by_qualcomm(ad):
+        mds_path = pull_package_apk(ad, "com.google.mdstest")
         gnss_cfg_file = pull_gnss_cfg_file(ad, default_gnss_cfg)
     stop_pixel_logger(ad)
     ad.stop_services()
-    attempts = 3
-    for i in range(1, attempts + 1):
+    for i in range(1, 4):
         try:
             if ad.serial in list_adb_devices():
                 ad.log.info("Reboot to bootloader")
@@ -639,10 +730,13 @@
                 break
             if ad.is_sl4a_installed():
                 break
+            if is_device_wearable(ad):
+                ad.log.info("Wait 5 mins for wearable projects system busy time.")
+                time.sleep(300)
             reinstall_package_apk(ad, SL4A_APK_NAME, sl4a_path)
             reinstall_package_apk(ad, "com.android.gpstool", gpstool_path)
-            reinstall_package_apk(ad, "com.google.mdstest", mds_path)
             if check_chipset_vendor_by_qualcomm(ad):
+                reinstall_package_apk(ad, "com.google.mdstest", mds_path)
                 ad.push_system_file(gnss_cfg_file, gnss_cfg_path)
             time.sleep(10)
             break
@@ -655,11 +749,13 @@
         ad.start_adb_logcat()
     except Exception as e:
         ad.log.error(e)
-    if skip_setup_wizard:
+    if state:
         ad.exit_setup_wizard()
     if ad.skip_sl4a:
         return status
     tutils.bring_up_sl4a(ad)
+    for path in [sl4a_path, gpstool_path, mds_path, gnss_cfg_file]:
+        shutil.rmtree(path, ignore_errors=True)
     return status
 
 
@@ -762,7 +858,15 @@
     raise signals.TestFailure("Fail to get %s location fixed within %d "
                               "attempts." % (type.upper(), retries))
 
-def start_ttff_by_gtw_gpstool(ad, ttff_mode, iteration, aid_data=False):
+
+def start_ttff_by_gtw_gpstool(ad,
+                              ttff_mode,
+                              iteration,
+                              aid_data=False,
+                              raninterval=False,
+                              mininterval=10,
+                              maxinterval=40,
+                              hot_warm_sleep=300):
     """Identify which TTFF mode for different test items.
 
     Args:
@@ -770,17 +874,28 @@
         ttff_mode: TTFF Test mode for current test item.
         iteration: Iteration of TTFF cycles.
         aid_data: Boolean for identify aid_data existed or not
+        raninterval: Boolean to indicate whether random TTFF intervals are enabled.
+        mininterval: Minimum value of random interval pool. The unit is second.
+        maxinterval: Maximum value of random interval pool. The unit is second.
+        hot_warm_sleep: Wait time for acquiring Almanac.
     """
     begin_time = get_current_epoch_time()
     if (ttff_mode == "hs" or ttff_mode == "ws") and not aid_data:
-        ad.log.info("Wait 5 minutes to start TTFF %s..." % ttff_mode.upper())
-        time.sleep(300)
+        ad.log.info("Wait {} seconds to start TTFF {}...".format(
+            hot_warm_sleep, ttff_mode.upper()))
+        time.sleep(hot_warm_sleep)
     if ttff_mode == "cs":
         ad.log.info("Start TTFF Cold Start...")
         time.sleep(3)
+    elif ttff_mode == "csa":
+        ad.log.info("Start TTFF CSWith Assist...")
+        time.sleep(3)
     for i in range(1, 4):
         ad.adb.shell("am broadcast -a com.android.gpstool.ttff_action "
-                     "--es ttff %s --es cycle %d" % (ttff_mode, iteration))
+                     "--es ttff {} --es cycle {}  --ez raninterval {} "
+                     "--ei mininterval {} --ei maxinterval {}".format(
+                         ttff_mode, iteration, raninterval, mininterval,
+                         maxinterval))
         time.sleep(1)
         if ad.search_logcat("act=com.android.gpstool.start_test_action",
                             begin_time):
@@ -806,30 +921,13 @@
         meas_flag: True to enable GnssMeasurement. False is not to. Default
         set to False.
     """
-    gnss_crash_list = [".*Fatal signal.*gnss",
-                       ".*Fatal signal.*xtra",
-                       ".*F DEBUG.*gnss"]
     process_gnss_by_gtw_gpstool(
         ad, criteria=criteria, type=type, meas_flag=meas_flag)
     ad.log.info("Start %s tracking test for %d minutes" % (type.upper(),
                                                            testtime))
     begin_time = get_current_epoch_time()
     while get_current_epoch_time() - begin_time < testtime * 60 * 1000:
-        if not ad.is_adb_logcat_on:
-            ad.start_adb_logcat()
-        for attr in gnss_crash_list:
-            gnss_crash_result = ad.adb.shell(
-                "logcat -d | grep -E -i '%s'" % attr)
-            if gnss_crash_result:
-                start_gnss_by_gtw_gpstool(ad, state=False, type=type)
-                raise signals.TestFailure(
-                    "Test failed due to GNSS HAL crashed. \n%s" %
-                    gnss_crash_result)
-        gpstool_crash_result = ad.search_logcat("Force finishing activity "
-                                                "com.android.gpstool/.GPSTool",
-                                                begin_time)
-        if gpstool_crash_result:
-            raise signals.TestError("GPSTool crashed. Abort test.")
+        detect_crash_during_tracking(ad, begin_time, type)
     ad.log.info("Successfully tested for %d minutes" % testtime)
     start_gnss_by_gtw_gpstool(ad, state=False, type=type)
 
@@ -967,6 +1065,8 @@
                         loc_time = int(
                             gnss_location_log[10].split("=")[-1].strip(","))
                         utc_time = epoch_to_human_time(loc_time)
+                        ttff_haccu = float(
+                            gnss_location_log[11].split("=")[-1].strip(","))
                 elif type == "flp":
                     flp_results = ad.search_logcat("GPSService: FLP Location",
                                                    begin_time)
@@ -976,12 +1076,14 @@
                             "log_message"].split()
                         ttff_lat = float(flp_location_log[8].split(",")[0])
                         ttff_lon = float(flp_location_log[8].split(",")[1])
+                        ttff_haccu = float(flp_location_log[9].split("=")[1])
                         utc_time = epoch_to_human_time(get_current_epoch_time())
             else:
                 ttff_ant_cn = float(ttff_log[19].strip("]"))
                 ttff_base_cn = float(ttff_log[26].strip("]"))
                 ttff_lat = 0
                 ttff_lon = 0
+                ttff_haccu = 0
                 utc_time = epoch_to_human_time(get_current_epoch_time())
             ad.log.debug("TTFF Loop %d - (Lat, Lon) = (%s, %s)" % (ttff_loop,
                                                                    ttff_lat,
@@ -993,16 +1095,19 @@
                                                ttff_sec=ttff_sec,
                                                ttff_pe=ttff_pe,
                                                ttff_ant_cn=ttff_ant_cn,
-                                               ttff_base_cn=ttff_base_cn)
+                                               ttff_base_cn=ttff_base_cn,
+                                               ttff_haccu=ttff_haccu)
             ad.log.info("UTC Time = %s, Loop %d = %.1f seconds, "
                         "Position Error = %.1f meters, "
                         "Antenna Average Signal = %.1f dbHz, "
-                        "Baseband Average Signal = %.1f dbHz" % (utc_time,
+                        "Baseband Average Signal = %.1f dbHz, "
+                        "Horizontal Accuracy = %.1f meters" % (utc_time,
                                                                  ttff_loop,
                                                                  ttff_sec,
                                                                  ttff_pe,
                                                                  ttff_ant_cn,
-                                                                 ttff_base_cn))
+                                                                 ttff_base_cn,
+                                                                 ttff_haccu))
         stop_gps_results = ad.search_logcat("stop gps test", begin_time)
         if stop_gps_results:
             ad.send_keycode("HOME")
@@ -1067,6 +1172,8 @@
                    ttff_data.keys()]
     base_cn_list = [float(ttff_data[key].ttff_base_cn) for key in
                     ttff_data.keys()]
+    haccu_list = [float(ttff_data[key].ttff_haccu) for key in
+                    ttff_data.keys()]
     timeoutcount = sec_list.count(0.0)
     if len(sec_list) == timeoutcount:
         avgttff = 9527
@@ -1080,6 +1187,7 @@
     maxdis = max(pe_list)
     ant_avgcn = sum(ant_cn_list)/len(ant_cn_list)
     base_avgcn = sum(base_cn_list)/len(base_cn_list)
+    avg_haccu = sum(haccu_list)/len(haccu_list)
     ad.log.info(prop_basename+"AvgTime %.1f" % avgttff)
     ad.log.info(prop_basename+"MaxTime %.1f" % maxttff)
     ad.log.info(prop_basename+"TimeoutCount %d" % timeoutcount)
@@ -1087,6 +1195,7 @@
     ad.log.info(prop_basename+"MaxDis %.1f" % maxdis)
     ad.log.info(prop_basename+"Ant_AvgSignal %.1f" % ant_avgcn)
     ad.log.info(prop_basename+"Base_AvgSignal %.1f" % base_avgcn)
+    ad.log.info(prop_basename+"Avg_Horizontal_Accuracy %.1f" % avg_haccu)
 
 
 def calculate_position_error(latitude, longitude, true_position):
@@ -1119,12 +1228,16 @@
     """
     ad.log.info("Launch Google Map.")
     try:
-        ad.adb.shell("am start -S -n com.google.android.apps.maps/"
-                     "com.google.android.maps.MapsActivity")
+        if is_device_wearable(ad):
+            cmd = ("am start -S -n com.google.android.apps.maps/"
+                   "com.google.android.apps.gmmwearable.MainActivity")
+        else:
+            cmd = ("am start -S -n com.google.android.apps.maps/"
+                   "com.google.android.maps.MapsActivity")
+        ad.adb.shell(cmd)
         ad.send_keycode("BACK")
         ad.force_stop_apk("com.google.android.apps.maps")
-        ad.adb.shell("am start -S -n com.google.android.apps.maps/"
-                     "com.google.android.maps.MapsActivity")
+        ad.adb.shell(cmd)
     except Exception as e:
         ad.log.error(e)
         raise signals.TestError("Failed to launch google map.")
@@ -1159,7 +1272,7 @@
         ad.log.info("Try to get location report from GnssLocationProvider API "
                     "- attempt %d" % (i+1))
         while get_current_epoch_time() - begin_time <= 30000:
-            logcat_results = ad.search_logcat("REPORT_LOCATION", begin_time)
+            logcat_results = ad.search_logcat("reportLocation", begin_time)
             if logcat_results:
                 ad.log.info("%s" % logcat_results[-1]["log_message"])
                 ad.log.info("GnssLocationProvider reports location "
@@ -1170,6 +1283,7 @@
     ad.log.error("GnssLocationProvider is unable to report location.")
     return False
 
+
 def check_network_location(ad, retries, location_type, criteria=30):
     """Verify if NLP reports location after requesting via GPSTool.
 
@@ -1297,20 +1411,35 @@
         ad: An AndroidDevice object.
         extra_msg: Extra message before or after the change.
     """
+    mpss_version = ""
+    brcm_gps_version = ""
+    brcm_sensorhub_version = ""
     try:
         build_version = ad.adb.getprop("ro.build.id")
         baseband_version = ad.adb.getprop("gsm.version.baseband")
         gms_version = ad.adb.shell(
             "dumpsys package com.google.android.gms | grep versionName"
         ).split("\n")[0].split("=")[1]
-        mpss_version = ad.adb.shell("cat /sys/devices/soc0/images | grep MPSS "
-                                    "| cut -d ':' -f 3")
+        if check_chipset_vendor_by_qualcomm(ad):
+            mpss_version = ad.adb.shell(
+                "cat /sys/devices/soc0/images | grep MPSS | cut -d ':' -f 3")
+        else:
+            brcm_gps_version = ad.adb.shell("cat /data/vendor/gps/chip.info")
+            sensorhub_version = ad.adb.shell(
+                "cat /vendor/firmware/SensorHub.patch | grep ChangeList")
+            brcm_sensorhub_version = re.compile(
+                r'<ChangeList=(\w+)>').search(sensorhub_version).group(1)
         if not extra_msg:
             ad.log.info("TestResult Build_Version %s" % build_version)
             ad.log.info("TestResult Baseband_Version %s" % baseband_version)
             ad.log.info(
                 "TestResult GMS_Version %s" % gms_version.replace(" ", ""))
-            ad.log.info("TestResult MPSS_Version %s" % mpss_version)
+            if check_chipset_vendor_by_qualcomm(ad):
+                ad.log.info("TestResult MPSS_Version %s" % mpss_version)
+            else:
+                ad.log.info("TestResult GPS_Version %s" % brcm_gps_version)
+                ad.log.info(
+                    "TestResult SensorHub_Version %s" % brcm_sensorhub_version)
         else:
             ad.log.info(
                 "%s, Baseband_Version = %s" % (extra_msg, baseband_version))
@@ -1332,7 +1461,14 @@
             ad.adb.shell("am start -S -n com.android.gpstool/.GPSTool "
                          "--es mode toggle --es cycle %d" % iteration)
             time.sleep(1)
-            if ad.search_logcat("cmp=com.android.gpstool/.ToggleGPS",
+            if is_device_wearable(ad):
+                # Wait 20 seconds for Wearable low performance time.
+                time.sleep(20)
+                if ad.search_logcat("ToggleGPS onResume",
+                                begin_time):
+                    ad.log.info("Send ToggleGPS start_test_action successfully.")
+                    break
+            elif ad.search_logcat("cmp=com.android.gpstool/.ToggleGPS",
                                 begin_time):
                 ad.log.info("Send ToggleGPS start_test_action successfully.")
                 break
@@ -1341,7 +1477,11 @@
             raise signals.TestError("Fail to send ToggleGPS "
                                     "start_test_action within 3 attempts.")
         time.sleep(2)
-        test_start = ad.search_logcat("GPSTool_ToggleGPS: startService",
+        if is_device_wearable(ad):
+            test_start = ad.search_logcat("GPSService: create toggle GPS log",
+                                      begin_time)
+        else:
+            test_start = ad.search_logcat("GPSTool_ToggleGPS: startService",
                                       begin_time)
         if test_start:
             ad.log.info(test_start[-1]["log_message"].split(":")[-1].strip())
@@ -1486,10 +1626,10 @@
         pe_criteria: Criteria for current test item.
 
     """
-    ad.log.info("%d iterations of TTFF %s tests finished.",
-                (len(ttff_data.keys()), ttff_mode))
-    ad.log.info("%s PASS criteria is %f meters", (ttff_mode, pe_criteria))
-    ad.log.debug("%s TTFF data: %s", (ttff_mode, ttff_data))
+    ad.log.info("%d iterations of TTFF %s tests finished."
+                % (len(ttff_data.keys()), ttff_mode))
+    ad.log.info("%s PASS criteria is %f meters" % (ttff_mode, pe_criteria))
+    ad.log.debug("%s TTFF data: %s" % (ttff_mode, ttff_data))
 
     if len(ttff_data.keys()) == 0:
         ad.log.error("GTW_GPSTool didn't process TTFF properly.")
@@ -1497,11 +1637,13 @@
 
     elif any(float(ttff_data[key].ttff_pe) >= pe_criteria for key in
              ttff_data.keys()):
-        ad.log.error("One or more TTFF %s are over test criteria %f meters",
-                     (ttff_mode, pe_criteria))
+        ad.log.error("One or more TTFF %s are over test criteria %f meters"
+                     % (ttff_mode, pe_criteria))
         raise signals.TestFailure("GTW_GPSTool didn't process TTFF properly.")
-    ad.log.info("All TTFF %s are within test criteria %f meters.",
-                (ttff_mode, pe_criteria))
+    else:
+        ad.log.info("All TTFF %s are within test criteria %f meters." % (
+            ttff_mode, pe_criteria))
+        return True
 
 
 def check_adblog_functionality(ad):
@@ -1560,7 +1702,7 @@
 
 
 def check_chipset_vendor_by_qualcomm(ad):
-    """Check if cipset vendor is by Qualcomm.
+    """Check if chipset vendor is by Qualcomm.
 
     Args:
         ad: An AndroidDevice object.
@@ -1598,14 +1740,14 @@
                    "NORMAL_PSDS_SERVER",
                    "REALTIME_PSDS_SERVER"]
     delete_lto_file(ad)
-    tmp_path = tempfile.mkdtemp()
-    ad.pull_files("/etc/gps_debug.conf", tmp_path)
-    gps_conf_path = os.path.join(tmp_path, "gps_debug.conf")
-    gps_conf_file = open(gps_conf_path, "r")
-    lines = gps_conf_file.readlines()
-    gps_conf_file.close()
-    fout = open(gps_conf_path, "w")
     if state:
+        tmp_path = tempfile.mkdtemp()
+        ad.pull_files("/etc/gps_debug.conf", tmp_path)
+        gps_conf_path = os.path.join(tmp_path, "gps_debug.conf")
+        gps_conf_file = open(gps_conf_path, "r")
+        lines = gps_conf_file.readlines()
+        gps_conf_file.close()
+        fout = open(gps_conf_path, "w")
         for line in lines:
             for server in server_list:
                 if server in line:
@@ -1615,6 +1757,7 @@
         ad.push_system_file(gps_conf_path, "/etc/gps_debug.conf")
         ad.log.info("Push back modified gps_debug.conf")
         ad.log.info("LTO/RTO/RTI enabled")
+        shutil.rmtree(tmp_path, ignore_errors=True)
     else:
         ad.adb.shell("echo %r >> /etc/gps_debug.conf" %
                      DISABLE_LTO_FILE_CONTENTS)
@@ -1622,6 +1765,78 @@
     reboot(ad)
 
 
+def lto_mode_wearable(ad, state):
+    """Enable or Disable LTO mode for wearable in Android R release.
+
+    Args:
+        ad: An AndroidDevice object.
+        state: True to enable. False to disable.
+    """
+    rto_enable = '    RtoEnable="true"\n'
+    rto_disable = '    RtoEnable="false"\n'
+    rti_enable = '    RtiEnable="true"\n'
+    rti_disable = '    RtiEnable="false"\n'
+    sync_lto_enable = '    HttpDirectSyncLto="true"\n'
+    sync_lto_disable = '    HttpDirectSyncLto="false"\n'
+    server_list = ["XTRA_SERVER_1", "XTRA_SERVER_2", "XTRA_SERVER_3"]
+    delete_lto_file(ad)
+    tmp_path = tempfile.mkdtemp()
+    ad.pull_files("/vendor/etc/gnss/gps.xml", tmp_path)
+    gps_xml_path = os.path.join(tmp_path, "gps.xml")
+    gps_xml_file = open(gps_xml_path, "r")
+    lines = gps_xml_file.readlines()
+    gps_xml_file.close()
+    fout = open(gps_xml_path, "w")
+    for line in lines:
+        if state:
+            if rto_disable in line:
+                line = line.replace(line, rto_enable)
+                ad.log.info("RTO enabled")
+            elif rti_disable in line:
+                line = line.replace(line, rti_enable)
+                ad.log.info("RTI enabled")
+            elif sync_lto_disable in line:
+                line = line.replace(line, sync_lto_enable)
+                ad.log.info("LTO sync enabled")
+        else:
+            if rto_enable in line:
+                line = line.replace(line, rto_disable)
+                ad.log.info("RTO disabled")
+            elif rti_enable in line:
+                line = line.replace(line, rti_disable)
+                ad.log.info("RTI disabled")
+            elif sync_lto_enable in line:
+                line = line.replace(line, sync_lto_disable)
+                ad.log.info("LTO sync disabled")
+        fout.write(line)
+    fout.close()
+    ad.push_system_file(gps_xml_path, "/vendor/etc/gnss/gps.xml")
+    ad.log.info("Push back modified gps.xml")
+    shutil.rmtree(tmp_path, ignore_errors=True)
+    if state:
+        xtra_tmp_path = tempfile.mkdtemp()
+        ad.pull_files("/etc/gps_debug.conf", xtra_tmp_path)
+        gps_conf_path = os.path.join(xtra_tmp_path, "gps_debug.conf")
+        gps_conf_file = open(gps_conf_path, "r")
+        lines = gps_conf_file.readlines()
+        gps_conf_file.close()
+        fout = open(gps_conf_path, "w")
+        for line in lines:
+            for server in server_list:
+                if server in line:
+                    line = line.replace(line, "")
+            fout.write(line)
+        fout.close()
+        ad.push_system_file(gps_conf_path, "/etc/gps_debug.conf")
+        ad.log.info("Push back modified gps_debug.conf")
+        ad.log.info("LTO/RTO/RTI enabled")
+        shutil.rmtree(xtra_tmp_path, ignore_errors=True)
+    else:
+        ad.adb.shell(
+            "echo %r >> /etc/gps_debug.conf" % DISABLE_LTO_FILE_CONTENTS_R)
+        ad.log.info("LTO/RTO/RTI disabled")
+
+
 def start_pixel_logger(ad, max_log_size_mb=100, max_number_of_files=500):
     """adb to start pixel logger for GNSS logging.
 
@@ -1635,31 +1850,35 @@
     start_timeout_sec = 60
     default_gnss_cfg = "/vendor/etc/mdlog/DEFAULT+SECURITY+FULLDPL+GPS.cfg"
     if check_chipset_vendor_by_qualcomm(ad):
-        start_cmd = ("am start-foreground-service -a com.android.pixellogger"
-                     ".service.logging.LoggingService.ACTION_START_LOGGING "
+        start_cmd = ("am startservice -a com.android.pixellogger."
+                     "service.logging.LoggingService.ACTION_START_LOGGING "
                      "-e intent_key_cfg_path '%s' "
                      "--ei intent_key_max_log_size_mb %d "
-                     "--ei intent_key_max_number_of_files %d" % (
-            default_gnss_cfg, max_log_size_mb, max_number_of_files))
+                     "--ei intent_key_max_number_of_files %d" %
+                     (default_gnss_cfg, max_log_size_mb, max_number_of_files))
     else:
         start_cmd = ("am startservice -a com.android.pixellogger."
                      "service.logging.LoggingService.ACTION_START_LOGGING "
-                     "-e intent_logger brcm_gps")
+                     "-e intent_logger brcm_gps "
+                     "--ei intent_key_max_log_size_mb %d "
+                     "--ei intent_key_max_number_of_files %d" %
+                     (max_log_size_mb, max_number_of_files))
     for attempt in range(retries):
-        begin_time = get_current_epoch_time()
-        ad.log.info("Start Pixel Logger. - Attempt %d" % (attempt + 1))
+        begin_time = get_current_epoch_time() - 3000
+        ad.log.info("Start Pixel Logger - Attempt %d" % (attempt + 1))
         ad.adb.shell(start_cmd)
         while get_current_epoch_time() - begin_time <= start_timeout_sec * 1000:
             if not ad.is_adb_logcat_on:
                 ad.start_adb_logcat()
             if check_chipset_vendor_by_qualcomm(ad):
-                start_result = ad.search_logcat("Start logging", begin_time)
+                start_result = ad.search_logcat(
+                    "ModemLogger: Start logging", begin_time)
             else:
                 start_result = ad.search_logcat("startRecording", begin_time)
             if start_result:
                 ad.log.info("Pixel Logger starts recording successfully.")
                 return True
-        ad.force_stop_apk("com.android.pixellogger")
+        stop_pixel_logger(ad)
     else:
         ad.log.warn("Pixel Logger fails to start recording in %d seconds "
                     "within %d attempts." % (start_timeout_sec, retries))
@@ -1672,17 +1891,18 @@
         ad: An AndroidDevice object.
     """
     retries = 3
-    stop_timeout_sec = 300
+    stop_timeout_sec = 60
+    zip_timeout_sec = 30
     if check_chipset_vendor_by_qualcomm(ad):
-        stop_cmd = ("am start-foreground-service -a com.android.pixellogger"
-                    ".service.logging.LoggingService.ACTION_STOP_LOGGING")
+        stop_cmd = ("am startservice -a com.android.pixellogger."
+                    "service.logging.LoggingService.ACTION_STOP_LOGGING")
     else:
         stop_cmd = ("am startservice -a com.android.pixellogger."
                     "service.logging.LoggingService.ACTION_STOP_LOGGING "
                     "-e intent_logger brcm_gps")
     for attempt in range(retries):
-        begin_time = get_current_epoch_time()
-        ad.log.info("Stop Pixel Logger. - Attempt %d" % (attempt + 1))
+        begin_time = get_current_epoch_time() - 3000
+        ad.log.info("Stop Pixel Logger - Attempt %d" % (attempt + 1))
         ad.adb.shell(stop_cmd)
         while get_current_epoch_time() - begin_time <= stop_timeout_sec * 1000:
             if not ad.is_adb_logcat_on:
@@ -1691,7 +1911,17 @@
                 "LoggingService: Stopping service", begin_time)
             if stop_result:
                 ad.log.info("Pixel Logger stops successfully.")
-                return True
+                zip_end_time = time.time() + zip_timeout_sec
+                while time.time() < zip_end_time:
+                    zip_file_created = ad.search_logcat(
+                        "FileUtil: Zip file has been created", begin_time)
+                    if zip_file_created:
+                        ad.log.info("Pixel Logger created zip file "
+                                    "successfully.")
+                        return True
+                else:
+                    ad.log.warn("Pixel Logger failed to create zip file.")
+                    return False
         ad.force_stop_apk("com.android.pixellogger")
     else:
         ad.log.warn("Pixel Logger fails to stop in %d seconds within %d "
@@ -1699,10 +1929,12 @@
 
 
 def launch_eecoexer(ad):
-    """adb to stop pixel logger for GNSS logging.
+    """Launch EEcoexer.
 
     Args:
         ad: An AndroidDevice object.
+    Raises:
+        signals.TestError if DUT fails to launch EEcoexer
     """
     launch_cmd = ("am start -a android.intent.action.MAIN -n"
                   "com.google.eecoexer"
@@ -1716,20 +1948,26 @@
 
 
 def excute_eecoexer_function(ad, eecoexer_args):
-    """adb to stop pixel logger for GNSS logging.
+    """Execute EEcoexer commands.
 
     Args:
         ad: An AndroidDevice object.
         eecoexer_args: EEcoexer function arguments
     """
+    cat_index = eecoexer_args.split(',')[:2]
+    cat_index = ','.join(cat_index)
     enqueue_cmd = ("am broadcast -a com.google.eecoexer.action.LISTENER"
                    " --es sms_body ENQUEUE,{}".format(eecoexer_args))
     exe_cmd = ("am broadcast -a com.google.eecoexer.action.LISTENER"
                " --es sms_body EXECUTE")
+    wait_for_cmd = ("am broadcast -a com.google.eecoexer.action.LISTENER"
+                   " --es sms_body WAIT_FOR_COMPLETE,{}".format(cat_index))
     ad.log.info("EEcoexer Add Enqueue: {}".format(eecoexer_args))
     ad.adb.shell(enqueue_cmd)
     ad.log.info("EEcoexer Excute.")
     ad.adb.shell(exe_cmd)
+    ad.log.info("Wait EEcoexer for complete")
+    ad.adb.shell(wait_for_cmd)
 
 
 def restart_gps_daemons(ad):
@@ -1756,3 +1994,490 @@
                 break
         else:
             raise signals.TestError("Unable to restart \"%s\"" % service)
+
+
+def is_device_wearable(ad):
+    """Check device is wearable project or not.
+
+    Args:
+        ad: An AndroidDevice object.
+    """
+    package = ad.adb.getprop("ro.cw.home_package_names")
+    ad.log.debug("[ro.cw.home_package_names]: [%s]" % package)
+    return "wearable" in package
+
+
+def is_mobile_data_on(ad):
+    """Check if mobile data of device is on.
+
+    Args:
+        ad: An AndroidDevice object.
+    """
+    if is_device_wearable(ad):
+        cell_on = ad.adb.shell("settings get global cell_on")
+        ad.log.debug("Current mobile status is %s" % cell_on)
+        return "1" in cell_on
+    else:
+        return ad.droid.telephonyIsDataEnabled()
+
+
+def human_to_epoch_time(human_time):
+    """Convert human readable time to epoch time.
+
+    Args:
+        human_time: Human readable time. (Ex: 2020-08-04 13:24:28.900)
+
+    Returns:
+        epoch: Epoch time in milliseconds.
+    """
+    if "/" in human_time:
+        human_time = human_time.replace("/", "-")
+    try:
+        epoch_start = datetime.utcfromtimestamp(0)
+        if "." in human_time:
+            epoch_time = datetime.strptime(human_time, "%Y-%m-%d %H:%M:%S.%f")
+        else:
+            epoch_time = datetime.strptime(human_time, "%Y-%m-%d %H:%M:%S")
+        epoch = int((epoch_time - epoch_start).total_seconds() * 1000)
+        return epoch
+    except ValueError:
+        return None
+
+
+def check_dpo_rate_via_gnss_meas(ad, begin_time, dpo_threshold):
+    """Check DPO engage rate through "HardwareClockDiscontinuityCount" in
+    GnssMeasurement callback.
+
+    Args:
+        ad: An AndroidDevice object.
+        begin_time: test begin time.
+        dpo_threshold: The value to set threshold. (Ex: dpo_threshold = 60)
+    """
+    time_regex = r'(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{3})'
+    dpo_results = ad.search_logcat("HardwareClockDiscontinuityCount",
+                                   begin_time)
+    if not dpo_results:
+        raise signals.TestError(
+            "No \"HardwareClockDiscontinuityCount\" is found in logs.")
+    ad.log.info(dpo_results[0]["log_message"])
+    ad.log.info(dpo_results[-1]["log_message"])
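+    # The first and last callback entries bracket the tracking window; their
+    # timestamps and counts are used to derive the DPO engage rate below.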
+    start_time = re.compile(
+        time_regex).search(dpo_results[0]["log_message"]).group(1)
+    end_time = re.compile(
+        time_regex).search(dpo_results[-1]["log_message"]).group(1)
+    gnss_start_epoch = human_to_epoch_time(start_time)
+    gnss_stop_epoch = human_to_epoch_time(end_time)
+    test_time_in_sec = round((gnss_stop_epoch - gnss_start_epoch) / 1000) + 1
+    first_dpo_count = int(dpo_results[0]["log_message"].split()[-1])
+    final_dpo_count = int(dpo_results[-1]["log_message"].split()[-1])
+    dpo_rate = (final_dpo_count - first_dpo_count) / test_time_in_sec
+    dpo_engage_rate = "{percent:.2%}".format(percent=dpo_rate)
+    ad.log.info("DPO is ON for %d seconds during %d seconds test." % (
+        final_dpo_count - first_dpo_count, test_time_in_sec))
+    ad.log.info("TestResult DPO_Engage_Rate " + dpo_engage_rate)
+    threshold = "{percent:.0%}".format(percent=dpo_threshold / 100)
+    asserts.assert_true(dpo_rate * 100 > dpo_threshold,
+                        "DPO only engaged %s in %d seconds test with "
+                        "threshold %s." % (dpo_engage_rate,
+                                           test_time_in_sec,
+                                           threshold))
+
+
+def parse_brcm_nmea_log(ad, nmea_pattern, brcm_error_log_allowlist):
+    """Parse specific NMEA pattern out of BRCM NMEA log.
+
+    Args:
+        ad: An AndroidDevice object.
+        nmea_pattern: Specific NMEA pattern to parse.
+        brcm_error_log_allowlist: Benign error logs to exclude.
+
+    Returns:
+        brcm_log_list: A list of specific NMEA pattern logs.
+        brcm_error_log: Non-allowlisted error logs joined into one string.
+    """
+    brcm_log_list = []
+    brcm_log_error_pattern = ["lhd: FS: Start Failsafe dump", "E slog"]
+    brcm_error_log_list = []
+    stop_pixel_logger(ad)
+    pixellogger_path = (
+        "/sdcard/Android/data/com.android.pixellogger/files/logs/gps/.")
+    tmp_log_path = tempfile.mkdtemp()
+    ad.pull_files(pixellogger_path, tmp_log_path)
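+    # Pixel Logger output may be a zip archive or, if zipping failed, a raw
+    # log directory; both cases are handled below.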
+    for path_key in os.listdir(tmp_log_path):
+        zip_path = posixpath.join(tmp_log_path, path_key)
+        if path_key.endswith(".zip"):
+            ad.log.info("Processing zip file: {}".format(zip_path))
+            with zipfile.ZipFile(zip_path, "r") as zip_file:
+                zip_file.extractall(tmp_log_path)
+                gl_logs = zip_file.namelist()
+                # b/214145973 check if hidden exists in pixel logger zip file
+                tmp_file = [name for name in gl_logs if 'tmp' in name]
+                if tmp_file:
+                    ad.log.warn(f"Hidden file {tmp_file} exists in pixel logger zip file")
+            break
+        elif os.path.isdir(zip_path):
+            ad.log.info("BRCM logs didn't zip properly. Log path is directory.")
+            tmp_log_path = zip_path
+            gl_logs = os.listdir(tmp_log_path)
+            ad.log.info("Processing BRCM log files: {}".format(gl_logs))
+            break
+    else:
+        raise signals.TestError(
+            "No BRCM logs found in {}".format(os.listdir(tmp_log_path)))
+    gl_logs = [log for log in gl_logs
+               if log.startswith("gl") and log.endswith(".log")]
+    for file in gl_logs:
+        nmea_log_path = posixpath.join(tmp_log_path, file)
+        ad.log.info("Parsing log pattern of \"%s\" in %s" % (nmea_pattern,
+                                                             nmea_log_path))
+        brcm_log = open(nmea_log_path, "r", encoding="UTF-8", errors="ignore")
+        lines = brcm_log.readlines()
+        brcm_log.close()
+        for line in lines:
+            if nmea_pattern in line:
+                brcm_log_list.append(line)
+            for attr in brcm_log_error_pattern:
+                if attr in line:
+                    benign_log = False
+                    for allow_log in brcm_error_log_allowlist:
+                        if allow_log in line:
+                            benign_log = True
+                            ad.log.info("\"%s\" is in allow-list and removed "
+                                        "from error." % allow_log)
+                    if not benign_log:
+                        brcm_error_log_list.append(line)
+    brcm_error_log = "".join(brcm_error_log_list)
+    shutil.rmtree(tmp_log_path, ignore_errors=True)
+    return brcm_log_list, brcm_error_log
+
+
+def check_dpo_rate_via_brcm_log(ad, dpo_threshold, brcm_error_log_allowlist):
+    """Check DPO engage rate through "$PGLOR,11,STA" in BRCM Log.
+    D - Disabled, Always full power.
+    F - Enabled, now in full power mode.
+    S - Enabled, now in power save mode.
+    H - Host off load mode.
+
+    Args:
+        ad: An AndroidDevice object.
+        dpo_threshold: The value to set threshold. (Ex: dpo_threshold = 60)
+        brcm_error_log_allowlist: Benign error logs to exclude.
+    """
+    always_full_power_count = 0
+    full_power_count = 0
+    power_save_count = 0
+    pglor_list, brcm_error_log = parse_brcm_nmea_log(
+        ad, "$PGLOR,11,STA", brcm_error_log_allowlist)
+    if not pglor_list:
+        raise signals.TestFailure("Fail to get DPO logs from pixel logger")
+
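+    # The ',P,<flag>,' field of each $PGLOR,11,STA sentence encodes the power
+    # mode described in the docstring above.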
+    for pglor in pglor_list:
+        power_res = re.compile(r',P,(\w),').search(pglor).group(1)
+        if power_res == "D":
+            always_full_power_count += 1
+        elif power_res == "F":
+            full_power_count += 1
+        elif power_res == "S":
+            power_save_count += 1
+    ad.log.info(sorted(pglor_list)[0])
+    ad.log.info(sorted(pglor_list)[-1])
+    ad.log.info("TestResult Total_Count %d" % len(pglor_list))
+    ad.log.info("TestResult Always_Full_Power_Count %d" %
+                always_full_power_count)
+    ad.log.info("TestResult Full_Power_Mode_Count %d" % full_power_count)
+    ad.log.info("TestResult Power_Save_Mode_Count %d" % power_save_count)
+    dpo_rate = (power_save_count / len(pglor_list))
+    dpo_engage_rate = "{percent:.2%}".format(percent=dpo_rate)
+    ad.log.info("Power Save Mode is ON for %d seconds during %d seconds test."
+                % (power_save_count, len(pglor_list)))
+    ad.log.info("TestResult DPO_Engage_Rate " + dpo_engage_rate)
+    threshold = "{percent:.0%}".format(percent=dpo_threshold / 100)
+    asserts.assert_true((dpo_rate * 100 > dpo_threshold) and not brcm_error_log,
+                        "Power Save Mode only engaged %s in %d seconds test "
+                        "with threshold %s.\nAbnormal behavior found as below."
+                        "\n%s" % (dpo_engage_rate,
+                                  len(pglor_list),
+                                  threshold,
+                                  brcm_error_log))
+
+
+def pair_to_wearable(ad, ad1):
+    """Pair phone to watch via Bluetooth.
+
+    Args:
+        ad: A pixel phone.
+        ad1: A wearable project.
+    """
+    check_location_service(ad1)
+    utils.sync_device_time(ad1)
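+    # Build the Bluetooth display name (model + serial suffix) used to pick
+    # the correct device during the pairing flow below.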
+    bt_model_name = ad.adb.getprop("ro.product.model")
+    bt_sn_name = ad.adb.getprop("ro.serialno")
+    bluetooth_name = bt_model_name + " " + bt_sn_name[10:]
+    fastboot_factory_reset(ad, False)
+    ad.log.info("Wait 1 min for wearable system busy time.")
+    time.sleep(60)
+    ad.adb.shell("input keyevent 4")
+    # Clear Denali paired data in phone.
+    ad1.adb.shell("pm clear com.google.android.gms")
+    ad1.adb.shell("pm clear com.google.android.apps.wear.companion")
+    ad1.adb.shell("am start -S -n com.google.android.apps.wear.companion/"
+                        "com.google.android.apps.wear.companion.application.RootActivity")
+    uia_click(ad1, "Next")
+    uia_click(ad1, "I agree")
+    uia_click(ad1, bluetooth_name)
+    uia_click(ad1, "Pair")
+    uia_click(ad1, "Skip")
+    uia_click(ad1, "Skip")
+    uia_click(ad1, "Finish")
+    ad.log.info("Wait 3 mins for complete pairing process.")
+    time.sleep(180)
+    ad.adb.shell("settings put global stay_on_while_plugged_in 7")
+    check_location_service(ad)
+    enable_gnss_verbose_logging(ad)
+    if is_bluetooth_connected(ad, ad1):
+        ad.log.info("Paired successfully.")
+    else:
+        raise signals.TestFailure("Failed to pair watch and phone.")
+
+
+def is_bluetooth_connected(ad, ad1):
+    """Check if device's Bluetooth status is connected or not.
+
+    Args:
+        ad: A wearable project.
+        ad1: A pixel phone.
+    """
+    return ad.droid.bluetoothIsDeviceConnected(ad1.droid.bluetoothGetLocalAddress())
+
+
+def detect_crash_during_tracking(ad, begin_time, type):
+    """Check if GNSS or GPSTool crash happened druing GNSS Tracking.
+
+    Args:
+    ad: An AndroidDevice object.
+    begin_time: Start Time to check if crash happened in logs.
+    type: Using GNSS or FLP reading method in GNSS tracking.
+    """
+    gnss_crash_list = [".*Fatal signal.*gnss",
+                       ".*Fatal signal.*xtra",
+                       ".*F DEBUG.*gnss",
+                       ".*Fatal signal.*gpsd"]
+    if not ad.is_adb_logcat_on:
+        ad.start_adb_logcat()
+    for attr in gnss_crash_list:
+        gnss_crash_result = ad.adb.shell(
+            "logcat -d | grep -E -i '%s'" % attr)
+        if gnss_crash_result:
+            start_gnss_by_gtw_gpstool(ad, state=False, type=type)
+            raise signals.TestFailure(
+                "Test failed due to GNSS HAL crashed. \n%s" %
+                gnss_crash_result)
+    gpstool_crash_result = ad.search_logcat("Force finishing activity "
+                                            "com.android.gpstool/.GPSTool",
+                                            begin_time)
+    if gpstool_crash_result:
+        raise signals.TestError("GPSTool crashed. Abort test.")
+
+
+def is_wearable_btwifi(ad):
+    """Check device is wearable btwifi sku or not.
+
+    Args:
+        ad: An AndroidDevice object.
+    """
+    package = ad.adb.getprop("ro.product.product.name")
+    ad.log.debug("[ro.product.product.name]: [%s]" % package)
+    return "btwifi" in package
+
+
+def compare_watch_phone_location(ad, watch_file, phone_file):
+    """Compare watch and phone's FLP location to see if the same or not.
+
+    Args:
+        ad: An AndroidDevice object.
+        watch_file: watch's FLP locations
+        phone_file: phone's FLP locations
+    """
+    not_match_location_counts = 0
+    not_match_location = []
+    for watch_key, watch_value in watch_file.items():
+        if phone_file.get(watch_key):
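+            # A delta larger than 2e-6 degrees (roughly 0.2 m) in either axis
+            # counts as a mismatched location.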
+            lat_ads = abs(float(watch_value[0]) - float(phone_file[watch_key][0]))
+            lon_ads = abs(float(watch_value[1]) - float(phone_file[watch_key][1]))
+            if lat_ads > 0.000002 or lon_ads > 0.000002:
+                not_match_location_counts += 1
+                not_match_location += (watch_key, watch_value, phone_file[watch_key])
+    if not_match_location_counts > 0:
+        ad.log.info("There are %s not match locations: %s" %(not_match_location_counts, not_match_location))
+        ad.log.info("Watch's locations are not using Phone's locations.")
+        return False
+    else:
+        ad.log.info("Watch's locations are using Phone's location.")
+        return True
+
+
+def check_tracking_file(ad):
+    """Check tracking file in device and save "Latitude", "Longitude", and "Time" information.
+
+    Args:
+        ad: An AndroidDevice object.
+
+    Returns:
+        location_reports: A dict keyed by time with [latitude, longitude].
+    """
+    location_reports = dict()
+    test_logfile = {}
+    file_count = int(ad.adb.shell("find %s -type f -iname *.txt | wc -l"
+                                  % GNSSSTATUS_LOG_PATH))
+    if file_count != 1:
+        ad.log.error("%d API logs exist." % file_count)
+    dir_file = ad.adb.shell("ls %s" % GNSSSTATUS_LOG_PATH).split()
+    for path_key in dir_file:
+        if fnmatch.fnmatch(path_key, "*.txt"):
+            logpath = posixpath.join(GNSSSTATUS_LOG_PATH, path_key)
+            out = ad.adb.shell("wc -c %s" % logpath)
+            file_size = int(out.split(" ")[0])
+            if file_size < 10:
+                ad.log.info("Skip log %s due to log size %d bytes" %
+                            (path_key, file_size))
+                continue
+            test_logfile = logpath
+    if not test_logfile:
+        raise signals.TestError("Failed to get test log file in device.")
+    lines = ad.adb.shell("cat %s" % test_logfile).split("\n")
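+    # Lines of interest start with "Latitude:", "Longitude:" or "Time:"; the
+    # values are sliced out by fixed character offsets below.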
+    for file_data in lines:
+        if "Latitude:" in file_data:
+            file_lat = ("%.6f" %float(file_data[9:]))
+        elif "Longitude:" in file_data:
+            file_long = ("%.6f" %float(file_data[11:]))
+        elif "Time:" in file_data:
+            file_time = (file_data[17:25])
+            location_reports[file_time] = [file_lat, file_long]
+    return location_reports
+
+
+def uia_click(ad, matching_text):
+    """Use uiautomator to click objects.
+
+    Args:
+        ad: An AndroidDevice object.
+        matching_text: Text of the target object to click
+    """
+    if ad.uia(textMatches=matching_text).wait.exists(timeout=60000):
+        ad.uia(textMatches=matching_text).click()
+        ad.log.info("Click button %s" % matching_text)
+    else:
+        ad.log.error("No button named %s" % matching_text)
+
+
+def delete_bcm_nvmem_sto_file(ad):
+    """Delete BCM's NVMEM ephemeris gldata.sto.
+
+    Args:
+        ad: An AndroidDevice object.
+    """
+    remount_device(ad)
+    rm_cmd = "rm -rf {}".format(BCM_NVME_STO_PATH)
+    status = ad.adb.shell(rm_cmd)
+    ad.log.info("Delete BCM's NVMEM ephemeris files.\n%s" % status)
+
+
+def bcm_gps_xml_add_option(ad,
+                           search_line=None,
+                           append_txt=None,
+                           gps_xml_path=BCM_GPS_XML_PATH):
+    """Append parameter setting in gps.xml for BCM solution
+
+    Args:
+        ad: An AndroidDevice object.
+        search_line: Pattern matching of target
+        line for appending new line data.
+        append_txt: New line that will be appended after the search_line.
+        gps_xml_path: gps.xml file location of DUT
+    """
+    remount_device(ad)
+    # Update gps.xml
+    if not search_line or not append_txt:
+        ad.log.info("Nothing for update.")
+    else:
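+        # Pull gps.xml to a temp dir, append append_txt after the matching
+        # search_line (skipping if already present), then push it back.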
+        tmp_log_path = tempfile.mkdtemp()
+        ad.pull_files(gps_xml_path, tmp_log_path)
+        gps_xml_tmp_path = os.path.join(tmp_log_path, "gps.xml")
+        gps_xml_file = open(gps_xml_tmp_path, "r")
+        lines = gps_xml_file.readlines()
+        gps_xml_file.close()
+        fout = open(gps_xml_tmp_path, "w")
+        append_txt_tag = append_txt.strip()
+        for line in lines:
+            if append_txt_tag in line:
+                ad.log.info('{} is already in the file. Skip'.format(append_txt))
+                continue
+            fout.write(line)
+            if search_line in line:
+                fout.write(append_txt)
+                ad.log.info("Update new line: '{}' in gps.xml.".format(append_txt))
+        fout.close()
+
+        # Update gps.xml with gps_new.xml
+        ad.push_system_file(gps_xml_tmp_path, gps_xml_path)
+
+        # remove temp folder
+        shutil.rmtree(tmp_log_path, ignore_errors=True)
+
+
+def bcm_gps_ignore_rom_alm(ad):
+    """ Update BCM gps.xml with ignoreRomAlm="True"
+    Args:
+        ad: An AndroidDevice object.
+    """
+    search_line_tag = '<gll\n'
+    append_line_str = '       IgnoreRomAlm=\"true\"\n'
+    bcm_gps_xml_add_option(ad, search_line_tag, append_line_str)
+
+
+def check_inject_time(ad):
+    """Check if watch could get the UTC time.
+
+    Args:
+        ad: An AndroidDevice object.
+    """
+    for i in range(1, 6):
+        time.sleep(10)
+        inject_time_results = ad.search_logcat("GPSIC.OUT.gps_inject_time")
+        ad.log.info("Check time injected - attempt %s" % i)
+        if inject_time_results:
+            ad.log.info("Time is injected successfully.")
+            return True
+    raise signals.TestFailure("Fail to get time injected within %s attempts." % i)
+
+
+def enable_framework_log(ad):
+    """Enable framework log for wearable to check UTC time download.
+
+    Args:
+        ad: An AndroidDevice object.
+    """
+    remount_device(ad)
+    time.sleep(3)
+    ad.log.info("Start to enable framwork log for wearable.")
+    ad.adb.shell("echo 'log.tag.LocationManagerService=VERBOSE' >> /data/local.prop")
+    ad.adb.shell("echo 'log.tag.GnssLocationProvider=VERBOSE' >> /data/local.prop")
+    ad.adb.shell("echo 'log.tag.GpsNetInitiatedHandler=VERBOSE' >> /data/local.prop")
+    ad.adb.shell("echo 'log.tag.GnssNetInitiatedHandler=VERBOSE' >> /data/local.prop")
+    ad.adb.shell("echo 'log.tag.GnssNetworkConnectivityHandler=VERBOSE' >> /data/local.prop")
+    ad.adb.shell("echo 'log.tag.NtpTimeHelper=VERBOSE' >> /data/local.prop")
+    ad.adb.shell("echo 'log.tag.ConnectivityService=VERBOSE' >> /data/local.prop")
+    ad.adb.shell("echo 'log.tag.GnssPsdsDownloader=VERBOSE' >> /data/local.prop")
+    ad.adb.shell("echo 'log.tag.GnssVisibilityControl=VERBOSE'  >> /data/local.prop")
+    ad.adb.shell("echo 'log.tag.Gnss=VERBOSE' >> /data/local.prop")
+    ad.adb.shell("echo 'log.tag.GnssConfiguration=VERBOSE' >> /data/local.prop")
+    ad.adb.shell("echo 'log.tag.ImsPhone=VERBOSE' >> /data/local.prop")
+    ad.adb.shell("echo 'log.tag.GsmCdmaPhone=VERBOSE' >> /data/local.prop")
+    ad.adb.shell("echo 'log.tag.Phone=VERBOSE' >> /data/local.prop")
+    ad.adb.shell("echo 'log.tag.GCoreFlp=VERBOSE' >> /data/local.prop")
+    ad.adb.shell("chmod 644 /data/local.prop")
+    ad.adb.shell("echo 'LogEnabled=true' > /data/vendor/gps/libgps.conf")
+    ad.adb.shell("chown gps.system /data/vendor/gps/libgps.conf")
+    ad.adb.shell("sync")
+    reboot(ad)
+    ad.log.info("Wait 2 mins for Wearable booting system busy")
+    time.sleep(120)
diff --git a/acts_tests/acts_contrib/test_utils/power/PowerBaseTest.py b/acts_tests/acts_contrib/test_utils/power/PowerBaseTest.py
index bf653af..11094fb 100644
--- a/acts_tests/acts_contrib/test_utils/power/PowerBaseTest.py
+++ b/acts_tests/acts_contrib/test_utils/power/PowerBaseTest.py
@@ -21,6 +21,7 @@
 import time
 
 import acts.controllers.power_monitor as power_monitor_lib
+import acts.controllers.monsoon as monsoon_controller
 import acts.controllers.iperf_server as ipf
 from acts import asserts
 from acts import base_test
@@ -105,7 +106,14 @@
 
         Raises an exception if there are no controllers available.
         """
-        if hasattr(self, 'monsoons'):
+        if hasattr(self, 'bitses'):
+            if hasattr(self, 'monsoons'):
+                self.log.info('Destroying monsoon controller.')
+                monsoon_controller.destroy(self.monsoons)
+                time.sleep(2)
+            self.power_monitor = self.bitses[0]
+            self.power_monitor.setup(registry=self.user_params)
+        elif hasattr(self, 'monsoons'):
             self.power_monitor = power_monitor_lib.PowerMonitorMonsoonFacade(
                 self.monsoons[0])
             self.monsoons[0].set_max_current(8.0)
@@ -141,7 +149,8 @@
                                extra_wait=None,
                                iperf_duration=None,
                                pass_fail_tolerance=THRESHOLD_TOLERANCE_DEFAULT,
-                               mon_voltage=PHONE_BATTERY_VOLTAGE_DEFAULT)
+                               mon_voltage=PHONE_BATTERY_VOLTAGE_DEFAULT,
+                               ap_dtim_period=None)
 
         # Setup the must have controllers, phone and monsoon
         self.dut = self.android_devices[0]
diff --git a/acts_tests/acts_contrib/test_utils/power/PowerGTWGnssBaseTest.py b/acts_tests/acts_contrib/test_utils/power/PowerGTWGnssBaseTest.py
index 3f8fd3a..baedb7e 100644
--- a/acts_tests/acts_contrib/test_utils/power/PowerGTWGnssBaseTest.py
+++ b/acts_tests/acts_contrib/test_utils/power/PowerGTWGnssBaseTest.py
@@ -92,6 +92,7 @@
             lowpower: a boolean to set GNSS Low Power Mode.
             meas: a boolean to set GNSS Measurement registeration.
         """
+        c_power, c_tracking, c_acquisition = self.request_power_stat()
         self.ad.adb.shell('settings put secure location_mode 3')
         gutils.start_gnss_by_gtw_gpstool(self.ad, True, 'gnss', True, freq,
                                          lowpower, meas)
@@ -106,6 +107,10 @@
         self.ad.send_keycode('WAKEUP')
 
         gutils.start_gnss_by_gtw_gpstool(self.ad, False, 'gnss')
+        n_power, n_tracking, n_acquisition = self.request_power_stat()
+        self.ad.log.info("TestResult Total_power: %.2f" %(n_power - c_power))
+        self.ad.log.info("TestResult Tracking: %.2f" %(n_tracking - c_tracking))
+        self.ad.log.info("TestResult Acquisition: %.2f" %(n_acquisition - c_acquisition))
         gutils.parse_gtw_gpstool_log(self.ad, self.test_location, type='gnss')
 
     def calibrate_avg_current(self, samples):
@@ -139,3 +144,33 @@
             self.ad.log.info(result)
             raise signals.TestFailure('DPO is not able to Turn: %s' % enable)
         self.dut_rockbottom()
+
+    def request_power_stat(self):
+        """Request the power state via command.
+        Returns:
+            total_power, tracking, acquisition power consumption.
+            If the device does not support, return 0, 0, 0
+        """
+        self.ad.adb.shell('cmd location providers send-extra-command gps request_power_stats')
+        time.sleep(1)
+        res = self.ad.adb.shell('dumpsys location | grep -A 10 -i \'power stats\'')
+        if res:
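+            # Each dumpsys stats line ends with "<value>mJ"; strip the unit
+            # and sum the single- and multi-band figures.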
+            for line in res.split("\n"):
+                if "total power" in line:
+                    total_power = line.split(" ")[-1].split("mJ")[0]
+                if "single-band tracking" in line:
+                    single_tracking = line.split(" ")[-1].split("mJ")[0]
+                    self.ad.log.info(single_tracking)
+                if "multi-band tracking" in line:
+                    multi_tracking = line.split(" ")[-1].split("mJ")[0]
+                if "single-band acquisition" in line:
+                    single_acquisition = line.split(" ")[-1].split("mJ")[0]
+                if "multi-band acquisition" in line:
+                    multi_acquisition = line.split(" ")[-1].split("mJ")[0]
+            tracking = float(single_tracking) + float(multi_tracking)
+            acquisition = float(single_acquisition) + float(multi_acquisition)
+            self.ad.log.info("total power: %.2f" %float(total_power))
+            self.ad.log.info("tracking: %.2f" %tracking)
+            self.ad.log.info("acquisition: %.2f" %acquisition)
+            return float(total_power), tracking, acquisition
+        return 0, 0, 0
diff --git a/acts_tests/acts_contrib/test_utils/power/PowerWiFiBaseTest.py b/acts_tests/acts_contrib/test_utils/power/PowerWiFiBaseTest.py
index b57a06e..4a93afb 100644
--- a/acts_tests/acts_contrib/test_utils/power/PowerWiFiBaseTest.py
+++ b/acts_tests/acts_contrib/test_utils/power/PowerWiFiBaseTest.py
@@ -49,7 +49,7 @@
             self.iperf_server = self.iperf_servers[0]
         if self.iperf_duration:
             self.mon_duration = self.iperf_duration - self.mon_offset - IPERF_TAIL
-            self.create_monsoon_info()
+            self.mon_info = self.create_monsoon_info()
 
         wutils.set_wifi_country_code(self.dut, 'US')
 
@@ -120,7 +120,8 @@
                             network,
                             bandwidth=80,
                             connect=True,
-                            ap=None):
+                            ap=None,
+                            dtim_period=None):
         """Setup AP and connect DUT to it.
 
         Args:
@@ -128,17 +129,23 @@
             bandwidth: bandwidth of the WiFi network to be setup
             connect: indicator of if connect dut to the network after setup
             ap: access point object, default is None to find the main AP
+            dtim_period: the dtim period of access point
         Returns:
             self.brconfigs: dict for bridge interface configs
         """
         wutils.wifi_toggle_state(self.dut, True)
+        if not dtim_period:
+            dtim_period = self.ap_dtim_period
         if not ap:
             if hasattr(self, 'access_points'):
-                self.brconfigs = wputils.ap_setup(self.access_point,
-                                                  network,
-                                                  bandwidth=bandwidth)
+                self.brconfigs = wputils.ap_setup(
+                    self.access_point,
+                    network,
+                    bandwidth=bandwidth,
+                    dtim_period=dtim_period)
         else:
-            self.brconfigs = wputils.ap_setup(ap, network, bandwidth=bandwidth)
+            self.brconfigs = wputils.ap_setup(
+                ap, network, bandwidth=bandwidth, dtim_period=dtim_period)
         if connect:
             wutils.wifi_connect(self.dut, network, num_of_tries=3)
 
@@ -154,9 +161,12 @@
         tag = ''
         if self.iperf_duration:
             throughput = self.process_iperf_results()
-            plot_title = '{}_{}_{}_RSSI_{0:d}dBm_Throughput_{1:.2f}Mbps'.format(
-                self.test_name, self.dut.model,
-                self.dut.build_info['build_id'], self.RSSI, throughput)
+            plot_title = ('{0}_{1}_{2}_RSSI_{3:d}dBm_Throughput_{4:.2f}'
+                          'Mbps'.format(self.test_name,
+                                        self.dut.model,
+                                        self.dut.build_info['build_id'],
+                                        self.RSSI,
+                                        throughput))
             plot_utils.current_waveform_plot(samples, self.mon_voltage,
                                              self.mon_info.data_path,
                                              plot_title)
diff --git a/acts_tests/acts_contrib/test_utils/power/plot_utils.py b/acts_tests/acts_contrib/test_utils/power/plot_utils.py
index b2a42ab..99d22ac 100644
--- a/acts_tests/acts_contrib/test_utils/power/plot_utils.py
+++ b/acts_tests/acts_contrib/test_utils/power/plot_utils.py
@@ -14,16 +14,18 @@
 #   See the License for the specific language governing permissions and
 #   limitations under the License.
 
-import os
+import datetime
 import logging
-import numpy
 import math
+import numpy
+import os
 
-from bokeh.layouts import layout
-from bokeh.models import CustomJS, ColumnDataSource
 from bokeh.models import tools as bokeh_tools
+from bokeh.models import CustomJS, ColumnDataSource
 from bokeh.models.widgets import DataTable, TableColumn
+from bokeh.models.formatters import DatetimeTickFormatter
 from bokeh.plotting import figure, output_file, save
+from bokeh.layouts import layout
 
 
 def current_waveform_plot(samples, voltage, dest_path, plot_title):
@@ -48,16 +50,17 @@
     """
     logging.info('Plotting the power measurement data.')
 
-    time_relative = [sample[0] for sample in samples]
-    duration = time_relative[-1] - time_relative[0]
+    duration = samples[-1][0] - samples[0][0]
     current_data = [sample[1] * 1000 for sample in samples]
     avg_current = sum(current_data) / len(current_data)
-
     color = ['navy'] * len(samples)
+    time_realtime = [
+        datetime.datetime.fromtimestamp(sample[0]) for sample in samples
+    ]
 
     # Preparing the data and source link for bokehn java callback
     source = ColumnDataSource(
-        data=dict(x=time_relative, y=current_data, color=color))
+        data=dict(x=time_realtime, y=current_data, color=color))
     s2 = ColumnDataSource(
         data=dict(a=[duration],
                   b=[round(avg_current, 2)],
@@ -81,7 +84,8 @@
     output_file(os.path.join(dest_path, plot_title + '.html'))
     tools = 'box_zoom,box_select,pan,crosshair,redo,undo,reset,hover,save'
     # Create a new plot with the datatable above
-    plot = figure(plot_width=1300,
+    plot = figure(x_axis_type='datetime',
+                  plot_width=1300,
                   plot_height=700,
                   title=plot_title,
                   tools=tools)
@@ -91,6 +95,13 @@
     plot.circle('x', 'y', source=source, size=0.5, fill_color='color')
     plot.xaxis.axis_label = 'Time (s)'
     plot.yaxis.axis_label = 'Current (mA)'
+    plot.xaxis.formatter = DatetimeTickFormatter(
+        seconds=["%H:%M:%S"],
+        milliseconds=["%H:%M:%S:%3Ns"],
+        microseconds=["%H:%M:%S:%fus"],
+        minutes=["%H:%M:%S"],
+        minsec=["%H:%M:%S"],
+        hours=["%H:%M:%S"])
 
     # Callback JavaScript
     source.selected.js_on_change(
@@ -120,7 +131,7 @@
         }
         ym /= inds.length
         ts = max - min
-        d2['a'].push(Math.round(ts*1000.0)/1000.0)
+        d2['a'].push(Math.round(ts*1000.0)/1000000.0)
         d2['b'].push(Math.round(ym*100.0)/100.0)
         d2['c'].push(Math.round(ym*4.2*100.0)/100.0)
         d2['d'].push(Math.round(ym*4.2*ts*100.0)/100.0)
diff --git a/acts_tests/acts_contrib/test_utils/wifi/aware/AwareBaseTest.py b/acts_tests/acts_contrib/test_utils/wifi/aware/AwareBaseTest.py
index c0c1075..14c5c0e 100644
--- a/acts_tests/acts_contrib/test_utils/wifi/aware/AwareBaseTest.py
+++ b/acts_tests/acts_contrib/test_utils/wifi/aware/AwareBaseTest.py
@@ -36,7 +36,7 @@
     device_startup_offset = 2
 
     def setup_class(self):
-        opt_param = ["pixel_models", "cnss_diag_file"]
+        opt_param = ["pixel_models", "cnss_diag_file", "ranging_role_concurrency_flexible_models"]
         self.unpack_userparams(opt_param_names=opt_param)
         if hasattr(self, "cnss_diag_file"):
             if isinstance(self.cnss_diag_file, list):
diff --git a/acts_tests/acts_contrib/test_utils/wifi/ota_chamber.py b/acts_tests/acts_contrib/test_utils/wifi/ota_chamber.py
index 4274603..d74e785 100644
--- a/acts_tests/acts_contrib/test_utils/wifi/ota_chamber.py
+++ b/acts_tests/acts_contrib/test_utils/wifi/ota_chamber.py
@@ -89,6 +89,7 @@
         self.log = logger.create_tagged_trace_logger('OtaChamber|{}'.format(
             self.device_id))
         self.current_mode = None
+        self.SUPPORTED_BANDS = ['2.4GHz', 'UNII-1', 'UNII-2', 'UNII-3', '6GHz']
 
     def set_orientation(self, orientation):
         self.log.info('Setting orientation to {} degrees.'.format(orientation))
@@ -128,6 +129,7 @@
         utils.exe_cmd('sudo {} -d {} -i 0'.format(self.TURNTABLE_FILE_PATH,
                                                   self.device_id))
         self.current_mode = None
+        self.SUPPORTED_BANDS = ['2.4GHz', 'UNII-1', 'UNII-2', 'UNII-3', '6GHz']
 
     def set_orientation(self, orientation):
         self.log.info('Setting orientation to {} degrees.'.format(orientation))
@@ -165,6 +167,7 @@
         self.chamber = ChamberAutoConnect(flow.Flow(), self.config)
         self.stirrer_ids = [0, 1, 2]
         self.current_mode = None
+        self.SUPPORTED_BANDS = ['2.4GHz', 'UNII-1', 'UNII-2', 'UNII-3']
 
     # Capture print output decorator
     @staticmethod
@@ -248,8 +251,8 @@
     def __init__(self, config):
         self.config = config.copy()
         self.device_id = self.config['device_id']
-        self.log = logger.create_tagged_trace_logger('EInstrumentChamber|{}'.format(
-            self.device_id))
+        self.log = logger.create_tagged_trace_logger(
+            'EInstrumentChamber|{}'.format(self.device_id))
         self.current_mode = None
         self.ser = self._get_serial(config['port'])
 
diff --git a/acts_tests/acts_contrib/test_utils/wifi/ota_sniffer.py b/acts_tests/acts_contrib/test_utils/wifi/ota_sniffer.py
index a20936f..395fed2 100644
--- a/acts_tests/acts_contrib/test_utils/wifi/ota_sniffer.py
+++ b/acts_tests/acts_contrib/test_utils/wifi/ota_sniffer.py
@@ -136,7 +136,7 @@
             network: dict of network credentials.
             duration: duration of the sniff.
         """
-        self.log.info('Starting sniffer.')
+        self.log.debug('Starting sniffer.')
 
     def stop_capture(self):
         """Stops the sniffer.
@@ -145,7 +145,7 @@
             log_file: name of processed sniffer.
         """
 
-        self.log.info('Stopping sniffer.')
+        self.log.debug('Stopping sniffer.')
         log_file = self._get_full_file_path()
         with open(log_file, 'w') as file:
             file.write('this is a sniffer dump.')
@@ -235,6 +235,7 @@
         self.sniffer_output_file_type = config['output_file_type']
         self.sniffer_snap_length = config['snap_length']
         self.sniffer_interface = config['interface']
+        self.sniffer_disabled = False
 
         #Logging into sniffer
         self.log.info('Logging into sniffer.')
@@ -325,13 +326,13 @@
         Args:
             sniffer_command: sniffer command to execute.
         """
-        self.log.info('Starting sniffer.')
+        self.log.debug('Starting sniffer.')
         sniffer_job = self._sniffer_server.run_async(sniffer_command)
         self.sniffer_proc_pid = sniffer_job.stdout
 
     def _stop_tshark(self):
         """ Stops the sniffer."""
-        self.log.info('Stopping sniffer')
+        self.log.debug('Stopping sniffer')
 
         # while loop to kill the sniffer process
         stop_time = time.time() + SNIFFER_TIMEOUT
@@ -408,7 +409,7 @@
         """
         # Checking for existing sniffer processes
         if self._started:
-            self.log.info('Sniffer already running')
+            self.log.debug('Sniffer already running')
             return
 
         # Configure sniffer
@@ -429,7 +430,7 @@
         """
         # Checking if there is an ongoing sniffer capture
         if not self._started:
-            self.log.error('No sniffer process running')
+            self.log.debug('No sniffer process running')
             return
         # Killing sniffer process
         self._stop_tshark()
@@ -495,6 +496,30 @@
         # e.g. setting monitor mode (which will fail if above is not complete)
         time.sleep(1)
 
+    def start_capture(self, network, chan, bw, duration=60):
+        """Starts sniffer capture on the specified machine.
+
+        Args:
+            network: dict describing network to sniff on.
+            chan: channel to sniff on.
+            bw: bandwidth of the channel to sniff on.
+            duration: duration of sniff.
+        """
+        # If the sniffer doesn't support the channel, return.
+        if '6g' in str(chan):
+            self.log.debug('Channel not supported on sniffer')
+            return
+        # Checking for existing sniffer processes
+        if self._started:
+            self.log.debug('Sniffer already running')
+            return
+
+        # Configure sniffer
+        self._configure_sniffer(network, chan, bw)
+        tshark_command = self._get_tshark_command(duration)
+        sniffer_command = self._get_sniffer_command(tshark_command)
+
+        # Starting sniffer capture by executing tshark command
+        self._run_tshark(sniffer_command)
+
     def set_monitor_mode(self, chan, bw):
         """Function to configure interface to monitor mode
 
diff --git a/acts_tests/acts_contrib/test_utils/wifi/rtt/RttBaseTest.py b/acts_tests/acts_contrib/test_utils/wifi/rtt/RttBaseTest.py
index bb58b02..c11f66a 100644
--- a/acts_tests/acts_contrib/test_utils/wifi/rtt/RttBaseTest.py
+++ b/acts_tests/acts_contrib/test_utils/wifi/rtt/RttBaseTest.py
@@ -29,7 +29,7 @@
 class RttBaseTest(BaseTestClass):
 
     def setup_class(self):
-        opt_param = ["pixel_models", "cnss_diag_file"]
+        opt_param = ["pixel_models", "cnss_diag_file", "ranging_role_concurrency_flexible_models"]
         self.unpack_userparams(opt_param_names=opt_param)
         if hasattr(self, "cnss_diag_file"):
             if isinstance(self.cnss_diag_file, list):
diff --git a/acts_tests/acts_contrib/test_utils/wifi/wifi_performance_test_utils.py b/acts_tests/acts_contrib/test_utils/wifi/wifi_performance_test_utils.py
deleted file mode 100644
index 8afe1d5..0000000
--- a/acts_tests/acts_contrib/test_utils/wifi/wifi_performance_test_utils.py
+++ /dev/null
@@ -1,1718 +0,0 @@
-#!/usr/bin/env python3.4
-#
-#   Copyright 2019 - The Android Open Source Project
-#
-#   Licensed under the Apache License, Version 2.0 (the 'License');
-#   you may not use this file except in compliance with the License.
-#   You may obtain a copy of the License at
-#
-#       http://www.apache.org/licenses/LICENSE-2.0
-#
-#   Unless required by applicable law or agreed to in writing, software
-#   distributed under the License is distributed on an 'AS IS' BASIS,
-#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#   See the License for the specific language governing permissions and
-#   limitations under the License.
-
-import bokeh, bokeh.plotting, bokeh.io
-import collections
-import hashlib
-import ipaddress
-import itertools
-import json
-import logging
-import math
-import os
-import re
-import statistics
-import time
-from acts.controllers.android_device import AndroidDevice
-from acts.controllers.utils_lib import ssh
-from acts import asserts
-from acts import utils
-from acts_contrib.test_utils.wifi import wifi_test_utils as wutils
-from concurrent.futures import ThreadPoolExecutor
-
-SHORT_SLEEP = 1
-MED_SLEEP = 6
-TEST_TIMEOUT = 10
-STATION_DUMP = 'iw wlan0 station dump'
-SCAN = 'wpa_cli scan'
-SCAN_RESULTS = 'wpa_cli scan_results'
-SIGNAL_POLL = 'wpa_cli signal_poll'
-WPA_CLI_STATUS = 'wpa_cli status'
-DISCONNECTION_MESSAGE_BRCM = 'driver adapter not found'
-CONST_3dB = 3.01029995664
-RSSI_ERROR_VAL = float('nan')
-RTT_REGEX = re.compile(r'^\[(?P<timestamp>\S+)\] .*? time=(?P<rtt>\S+)')
-LOSS_REGEX = re.compile(r'(?P<loss>\S+)% packet loss')
-FW_REGEX = re.compile(r'FW:(?P<firmware>\S+) HW:')
-
-
-# Threading decorator
-def nonblocking(f):
-    """Creates a decorator transforming function calls to non-blocking"""
-    def wrap(*args, **kwargs):
-        executor = ThreadPoolExecutor(max_workers=1)
-        thread_future = executor.submit(f, *args, **kwargs)
-        # Ensure resources are freed up when executor ruturns or raises
-        executor.shutdown(wait=False)
-        return thread_future
-
-    return wrap
-
-
-# JSON serializer
-def serialize_dict(input_dict):
-    """Function to serialize dicts to enable JSON output"""
-    output_dict = collections.OrderedDict()
-    for key, value in input_dict.items():
-        output_dict[_serialize_value(key)] = _serialize_value(value)
-    return output_dict
-
-
-def _serialize_value(value):
-    """Function to recursively serialize dict entries to enable JSON output"""
-    if isinstance(value, tuple):
-        return str(value)
-    if isinstance(value, list):
-        return [_serialize_value(x) for x in value]
-    elif isinstance(value, dict):
-        return serialize_dict(value)
-    else:
-        return value
-
-
-# Miscellaneous Wifi Utilities
-def extract_sub_dict(full_dict, fields):
-    sub_dict = collections.OrderedDict(
-        (field, full_dict[field]) for field in fields)
-    return sub_dict
-
-
-def validate_network(dut, ssid):
-    """Check that DUT has a valid internet connection through expected SSID
-
-    Args:
-        dut: android device of interest
-        ssid: expected ssid
-    """
-    current_network = dut.droid.wifiGetConnectionInfo()
-    try:
-        connected = wutils.validate_connection(dut) is not None
-    except:
-        connected = False
-    if connected and current_network['SSID'] == ssid:
-        return True
-    else:
-        return False
-
-
-def get_server_address(ssh_connection, dut_ip, subnet_mask):
-    """Get server address on a specific subnet,
-
-    This function retrieves the LAN or WAN IP of a remote machine used in
-    testing. If subnet_mask is set to 'public' it returns a machines global ip,
-    else it returns the ip belonging to the dut local network given the dut's
-    ip and subnet mask.
-
-    Args:
-        ssh_connection: object representing server for which we want an ip
-        dut_ip: string in ip address format, i.e., xxx.xxx.xxx.xxx
-        subnet_mask: string representing subnet mask (public for global ip)
-    """
-    ifconfig_out = ssh_connection.run('ifconfig').stdout
-    ip_list = re.findall('inet (?:addr:)?(\d+.\d+.\d+.\d+)', ifconfig_out)
-    ip_list = [ipaddress.ip_address(ip) for ip in ip_list]
-
-    if subnet_mask == 'public':
-        for ip in ip_list:
-            # is_global is not used to allow for CGNAT ips in 100.x.y.z range
-            if not ip.is_private:
-                return str(ip)
-    else:
-        dut_network = ipaddress.ip_network('{}/{}'.format(dut_ip, subnet_mask),
-                                           strict=False)
-        for ip in ip_list:
-            if ip in dut_network:
-                return str(ip)
-    logging.error('No IP address found in requested subnet')
-
-
-# Plotting Utilities
-class BokehFigure():
-    """Class enabling  simplified Bokeh plotting."""
-
-    COLORS = [
-        'black',
-        'blue',
-        'blueviolet',
-        'brown',
-        'burlywood',
-        'cadetblue',
-        'cornflowerblue',
-        'crimson',
-        'cyan',
-        'darkblue',
-        'darkgreen',
-        'darkmagenta',
-        'darkorange',
-        'darkred',
-        'deepskyblue',
-        'goldenrod',
-        'green',
-        'grey',
-        'indigo',
-        'navy',
-        'olive',
-        'orange',
-        'red',
-        'salmon',
-        'teal',
-        'yellow',
-    ]
-    MARKERS = [
-        'asterisk', 'circle', 'circle_cross', 'circle_x', 'cross', 'diamond',
-        'diamond_cross', 'hex', 'inverted_triangle', 'square', 'square_x',
-        'square_cross', 'triangle', 'x'
-    ]
-
-    TOOLS = ('box_zoom,box_select,pan,crosshair,redo,undo,reset,hover,save')
-    TOOLTIPS = [
-        ('index', '$index'),
-        ('(x,y)', '($x, $y)'),
-        ('info', '@hover_text'),
-    ]
-
-    def __init__(self,
-                 title=None,
-                 x_label=None,
-                 primary_y_label=None,
-                 secondary_y_label=None,
-                 height=700,
-                 width=1100,
-                 title_size='15pt',
-                 axis_label_size='12pt',
-                 json_file=None):
-        if json_file:
-            self.load_from_json(json_file)
-        else:
-            self.figure_data = []
-            self.fig_property = {
-                'title': title,
-                'x_label': x_label,
-                'primary_y_label': primary_y_label,
-                'secondary_y_label': secondary_y_label,
-                'num_lines': 0,
-                'height': height,
-                'width': width,
-                'title_size': title_size,
-                'axis_label_size': axis_label_size
-            }
-
-    def init_plot(self):
-        self.plot = bokeh.plotting.figure(
-            sizing_mode='scale_both',
-            plot_width=self.fig_property['width'],
-            plot_height=self.fig_property['height'],
-            title=self.fig_property['title'],
-            tools=self.TOOLS,
-            output_backend='webgl')
-        self.plot.hover.tooltips = self.TOOLTIPS
-        self.plot.add_tools(
-            bokeh.models.tools.WheelZoomTool(dimensions='width'))
-        self.plot.add_tools(
-            bokeh.models.tools.WheelZoomTool(dimensions='height'))
-
-    def _filter_line(self, x_data, y_data, hover_text=None):
-        """Function to remove NaN points from bokeh plots."""
-        x_data_filtered = []
-        y_data_filtered = []
-        hover_text_filtered = []
-        for x, y, hover in itertools.zip_longest(x_data, y_data, hover_text):
-            if not math.isnan(y):
-                x_data_filtered.append(x)
-                y_data_filtered.append(y)
-                hover_text_filtered.append(hover)
-        return x_data_filtered, y_data_filtered, hover_text_filtered
-
-    def add_line(self,
-                 x_data,
-                 y_data,
-                 legend,
-                 hover_text=None,
-                 color=None,
-                 width=3,
-                 style='solid',
-                 marker=None,
-                 marker_size=10,
-                 shaded_region=None,
-                 y_axis='default'):
-        """Function to add line to existing BokehFigure.
-
-        Args:
-            x_data: list containing x-axis values for line
-            y_data: list containing y_axis values for line
-            legend: string containing line title
-            hover_text: text to display when hovering over lines
-            color: string describing line color
-            width: integer line width
-            style: string describing line style, e.g, solid or dashed
-            marker: string specifying line marker, e.g., cross
-            shaded region: data describing shaded region to plot
-            y_axis: identifier for y-axis to plot line against
-        """
-        if y_axis not in ['default', 'secondary']:
-            raise ValueError('y_axis must be default or secondary')
-        if color == None:
-            color = self.COLORS[self.fig_property['num_lines'] %
-                                len(self.COLORS)]
-        if style == 'dashed':
-            style = [5, 5]
-        if not hover_text:
-            hover_text = ['y={}'.format(y) for y in y_data]
-        x_data_filter, y_data_filter, hover_text_filter = self._filter_line(
-            x_data, y_data, hover_text)
-        self.figure_data.append({
-            'x_data': x_data_filter,
-            'y_data': y_data_filter,
-            'legend': legend,
-            'hover_text': hover_text_filter,
-            'color': color,
-            'width': width,
-            'style': style,
-            'marker': marker,
-            'marker_size': marker_size,
-            'shaded_region': shaded_region,
-            'y_axis': y_axis
-        })
-        self.fig_property['num_lines'] += 1
-
-    def add_scatter(self,
-                    x_data,
-                    y_data,
-                    legend,
-                    hover_text=None,
-                    color=None,
-                    marker=None,
-                    marker_size=10,
-                    y_axis='default'):
-        """Function to add line to existing BokehFigure.
-
-        Args:
-            x_data: list containing x-axis values for line
-            y_data: list containing y_axis values for line
-            legend: string containing line title
-            hover_text: text to display when hovering over lines
-            color: string describing line color
-            marker: string specifying marker, e.g., cross
-            y_axis: identifier for y-axis to plot line against
-        """
-        if y_axis not in ['default', 'secondary']:
-            raise ValueError('y_axis must be default or secondary')
-        if color == None:
-            color = self.COLORS[self.fig_property['num_lines'] %
-                                len(self.COLORS)]
-        if marker == None:
-            marker = self.MARKERS[self.fig_property['num_lines'] %
-                                  len(self.MARKERS)]
-        if not hover_text:
-            hover_text = ['y={}'.format(y) for y in y_data]
-        self.figure_data.append({
-            'x_data': x_data,
-            'y_data': y_data,
-            'legend': legend,
-            'hover_text': hover_text,
-            'color': color,
-            'width': 0,
-            'style': 'solid',
-            'marker': marker,
-            'marker_size': marker_size,
-            'shaded_region': None,
-            'y_axis': y_axis
-        })
-        self.fig_property['num_lines'] += 1
-
-    def generate_figure(self, output_file=None, save_json=True):
-        """Function to generate and save BokehFigure.
-
-        Args:
-            output_file: string specifying output file path
-        """
-        self.init_plot()
-        two_axes = False
-        for line in self.figure_data:
-            source = bokeh.models.ColumnDataSource(
-                data=dict(x=line['x_data'],
-                          y=line['y_data'],
-                          hover_text=line['hover_text']))
-            if line['width'] > 0:
-                self.plot.line(x='x',
-                               y='y',
-                               legend_label=line['legend'],
-                               line_width=line['width'],
-                               color=line['color'],
-                               line_dash=line['style'],
-                               name=line['y_axis'],
-                               y_range_name=line['y_axis'],
-                               source=source)
-            if line['shaded_region']:
-                band_x = line['shaded_region']['x_vector']
-                band_x.extend(line['shaded_region']['x_vector'][::-1])
-                band_y = line['shaded_region']['lower_limit']
-                band_y.extend(line['shaded_region']['upper_limit'][::-1])
-                self.plot.patch(band_x,
-                                band_y,
-                                color='#7570B3',
-                                line_alpha=0.1,
-                                fill_alpha=0.1)
-            if line['marker'] in self.MARKERS:
-                marker_func = getattr(self.plot, line['marker'])
-                marker_func(x='x',
-                            y='y',
-                            size=line['marker_size'],
-                            legend_label=line['legend'],
-                            line_color=line['color'],
-                            fill_color=line['color'],
-                            name=line['y_axis'],
-                            y_range_name=line['y_axis'],
-                            source=source)
-            if line['y_axis'] == 'secondary':
-                two_axes = True
-
-        #x-axis formatting
-        self.plot.xaxis.axis_label = self.fig_property['x_label']
-        self.plot.x_range.range_padding = 0
-        self.plot.xaxis[0].axis_label_text_font_size = self.fig_property[
-            'axis_label_size']
-        #y-axis formatting
-        self.plot.yaxis[0].axis_label = self.fig_property['primary_y_label']
-        self.plot.yaxis[0].axis_label_text_font_size = self.fig_property[
-            'axis_label_size']
-        self.plot.y_range = bokeh.models.DataRange1d(names=['default'])
-        if two_axes and 'secondary' not in self.plot.extra_y_ranges:
-            self.plot.extra_y_ranges = {
-                'secondary': bokeh.models.DataRange1d(names=['secondary'])
-            }
-            self.plot.add_layout(
-                bokeh.models.LinearAxis(
-                    y_range_name='secondary',
-                    axis_label=self.fig_property['secondary_y_label'],
-                    axis_label_text_font_size=self.
-                    fig_property['axis_label_size']), 'right')
-        # plot formatting
-        self.plot.legend.location = 'top_right'
-        self.plot.legend.click_policy = 'hide'
-        self.plot.title.text_font_size = self.fig_property['title_size']
-
-        if output_file is not None:
-            self.save_figure(output_file, save_json)
-        return self.plot
-
-    def load_from_json(self, file_path):
-        with open(file_path, 'r') as json_file:
-            fig_dict = json.load(json_file)
-        self.fig_property = fig_dict['fig_property']
-        self.figure_data = fig_dict['figure_data']
-
-    def _save_figure_json(self, output_file):
-        """Function to save a json format of a figure"""
-        figure_dict = collections.OrderedDict(fig_property=self.fig_property,
-                                              figure_data=self.figure_data)
-        output_file = output_file.replace('.html', '_plot_data.json')
-        with open(output_file, 'w') as outfile:
-            json.dump(figure_dict, outfile, indent=4)
-
-    def save_figure(self, output_file, save_json=True):
-        """Function to save BokehFigure.
-
-        Args:
-            output_file: string specifying output file path
-            save_json: flag controlling json outputs
-        """
-        if save_json:
-            self._save_figure_json(output_file)
-        bokeh.io.output_file(output_file)
-        bokeh.io.save(self.plot)
-
-    @staticmethod
-    def save_figures(figure_array, output_file_path, save_json=True):
-        """Function to save list of BokehFigures in one file.
-
-        Args:
-            figure_array: list of BokehFigure object to be plotted
-            output_file: string specifying output file path
-        """
-        for idx, figure in enumerate(figure_array):
-            figure.generate_figure()
-            if save_json:
-                json_file_path = output_file_path.replace(
-                    '.html', '{}-plot_data.json'.format(idx))
-                figure._save_figure_json(json_file_path)
-        plot_array = [figure.plot for figure in figure_array]
-        all_plots = bokeh.layouts.column(children=plot_array,
-                                         sizing_mode='scale_width')
-        bokeh.plotting.output_file(output_file_path)
-        bokeh.plotting.save(all_plots)
-
-
-# Ping utilities
-class PingResult(object):
-    """An object that contains the results of running ping command.
-
-    Attributes:
-        connected: True if a connection was made. False otherwise.
-        packet_loss_percentage: The total percentage of packets lost.
-        transmission_times: The list of PingTransmissionTimes containing the
-            timestamps gathered for transmitted packets.
-        rtts: An list-like object enumerating all round-trip-times of
-            transmitted packets.
-        timestamps: A list-like object enumerating the beginning timestamps of
-            each packet transmission.
-        ping_interarrivals: A list-like object enumerating the amount of time
-            between the beginning of each subsequent transmission.
-    """
-    def __init__(self, ping_output):
-        self.packet_loss_percentage = 100
-        self.transmission_times = []
-
-        self.rtts = _ListWrap(self.transmission_times, lambda entry: entry.rtt)
-        self.timestamps = _ListWrap(self.transmission_times,
-                                    lambda entry: entry.timestamp)
-        self.ping_interarrivals = _PingInterarrivals(self.transmission_times)
-
-        self.start_time = 0
-        for line in ping_output:
-            if 'loss' in line:
-                match = re.search(LOSS_REGEX, line)
-                self.packet_loss_percentage = float(match.group('loss'))
-            if 'time=' in line:
-                match = re.search(RTT_REGEX, line)
-                if self.start_time == 0:
-                    self.start_time = float(match.group('timestamp'))
-                self.transmission_times.append(
-                    PingTransmissionTimes(
-                        float(match.group('timestamp')) - self.start_time,
-                        float(match.group('rtt'))))
-        self.connected = len(
-            ping_output) > 1 and self.packet_loss_percentage < 100
-
-    def __getitem__(self, item):
-        if item == 'rtt':
-            return self.rtts
-        if item == 'connected':
-            return self.connected
-        if item == 'packet_loss_percentage':
-            return self.packet_loss_percentage
-        raise ValueError('Invalid key. Please use an attribute instead.')
-
-    def as_dict(self):
-        return {
-            'connected': 1 if self.connected else 0,
-            'rtt': list(self.rtts),
-            'time_stamp': list(self.timestamps),
-            'ping_interarrivals': list(self.ping_interarrivals),
-            'packet_loss_percentage': self.packet_loss_percentage
-        }
-
-
-class PingTransmissionTimes(object):
-    """A class that holds the timestamps for a packet sent via the ping command.
-
-    Attributes:
-        rtt: The round trip time for the packet sent.
-        timestamp: The timestamp the packet started its trip.
-    """
-    def __init__(self, timestamp, rtt):
-        self.rtt = rtt
-        self.timestamp = timestamp
-
-
-class _ListWrap(object):
-    """A convenient helper class for treating list iterators as native lists."""
-    def __init__(self, wrapped_list, func):
-        self.__wrapped_list = wrapped_list
-        self.__func = func
-
-    def __getitem__(self, key):
-        return self.__func(self.__wrapped_list[key])
-
-    def __iter__(self):
-        for item in self.__wrapped_list:
-            yield self.__func(item)
-
-    def __len__(self):
-        return len(self.__wrapped_list)
-
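As a quick illustration (a sketch, not part of the patch), _ListWrap lazily applies the supplied projection to each entry of the wrapped list:

    # Project the rtt attribute out of a list of PingTransmissionTimes entries.
    entries = [PingTransmissionTimes(0.0, 5.1), PingTransmissionTimes(0.02, 4.8)]
    rtts = _ListWrap(entries, lambda entry: entry.rtt)
    assert list(rtts) == [5.1, 4.8] and rtts[0] == 5.1 and len(rtts) == 2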
-
-class _PingInterarrivals(object):
-    """A helper class for treating ping interarrivals as a native list."""
-    def __init__(self, ping_entries):
-        self.__ping_entries = ping_entries
-
-    def __getitem__(self, key):
-        return (self.__ping_entries[key + 1].timestamp -
-                self.__ping_entries[key].timestamp)
-
-    def __iter__(self):
-        for index in range(len(self.__ping_entries) - 1):
-            yield self[index]
-
-    def __len__(self):
-        return max(0, len(self.__ping_entries) - 1)
-
-
-def get_ping_stats(src_device, dest_address, ping_duration, ping_interval,
-                   ping_size):
-    """Run ping to or from the DUT.
-
-    The function either pings the DUT or pings a remote IP address from the
-    DUT.
-
-    Args:
-        src_device: object representing device to ping from
-        dest_address: ip address to ping
-        ping_duration: timeout to set on the ping process (in seconds)
-        ping_interval: time between pings (in seconds)
-        ping_size: size of ping packet payload
-    Returns:
-        ping_result: dict containing ping results and other meta data
-    """
-    ping_count = int(ping_duration / ping_interval)
-    ping_deadline = int(ping_count * ping_interval) + 1
-    ping_cmd_linux = 'ping -c {} -w {} -i {} -s {} -D'.format(
-        ping_count,
-        ping_deadline,
-        ping_interval,
-        ping_size,
-    )
-
-    ping_cmd_macos = 'ping -c {} -t {} -i {} -s {}'.format(
-        ping_count,
-        ping_deadline,
-        ping_interval,
-        ping_size,
-    )
-
-    if isinstance(src_device, AndroidDevice):
-        ping_cmd = '{} {}'.format(ping_cmd_linux, dest_address)
-        ping_output = src_device.adb.shell(ping_cmd,
-                                           timeout=ping_deadline + SHORT_SLEEP,
-                                           ignore_status=True)
-    elif isinstance(src_device, ssh.connection.SshConnection):
-        platform = src_device.run('uname').stdout
-        if 'linux' in platform.lower():
-            ping_cmd = 'sudo {} {}'.format(ping_cmd_linux, dest_address)
-        elif 'darwin' in platform.lower():
-            ping_cmd = "sudo {} {}| while IFS= read -r line; do printf '[%s] %s\n' \"$(gdate '+%s.%N')\" \"$line\"; done".format(
-                ping_cmd_macos, dest_address)
-        ping_output = src_device.run(ping_cmd,
-                                     timeout=ping_deadline + SHORT_SLEEP,
-                                     ignore_status=True).stdout
-    else:
-        raise TypeError('Unable to ping using src_device of type %s.' %
-                        type(src_device))
-    return PingResult(ping_output.splitlines())
-
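As a worked example of the command construction above (values chosen purely for illustration): a 10 s ping at 0.02 s intervals with 64-byte payloads gives ping_count = 500 and ping_deadline = 11, so on a Linux host the assembled command is:

    # Reproduces the arithmetic in get_ping_stats; 8.8.8.8 is a placeholder address.
    ping_duration, ping_interval, ping_size = 10, 0.02, 64
    ping_count = int(ping_duration / ping_interval)       # 500
    ping_deadline = int(ping_count * ping_interval) + 1   # 11
    cmd = 'ping -c {} -w {} -i {} -s {} -D {}'.format(
        ping_count, ping_deadline, ping_interval, ping_size, '8.8.8.8')
    # -> 'ping -c 500 -w 11 -i 0.02 -s 64 -D 8.8.8.8'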
-
-@nonblocking
-def get_ping_stats_nb(src_device, dest_address, ping_duration, ping_interval,
-                      ping_size):
-    return get_ping_stats(src_device, dest_address, ping_duration,
-                          ping_interval, ping_size)
-
-
-# Iperf utilities
-@nonblocking
-def start_iperf_client_nb(iperf_client, iperf_server_address, iperf_args, tag,
-                          timeout):
-    return iperf_client.start(iperf_server_address, iperf_args, tag, timeout)
-
-
-def get_iperf_arg_string(duration,
-                         reverse_direction,
-                         interval=1,
-                         traffic_type='TCP',
-                         socket_size=None,
-                         num_processes=1,
-                         udp_throughput='1000M',
-                         ipv6=False):
-    """Function to format iperf client arguments.
-
-    This function takes in iperf client parameters and returns a properly
-    formatted iperf arg string to be used in throughput tests.
-
-    Args:
-        duration: iperf duration in seconds
-        reverse_direction: boolean controlling the -R flag for iperf clients
-        interval: iperf print interval
-        traffic_type: string specifying TCP or UDP traffic
-        socket_size: string specifying TCP window or socket buffer, e.g., 2M
-        num_processes: int specifying number of iperf processes
-        udp_throughput: string specifying TX throughput in UDP tests, e.g. 100M
-        ipv6: boolean controlling the use of IP V6
-    Returns:
-        iperf_args: string of formatted iperf args
-    """
-    iperf_args = '-i {} -t {} -J '.format(interval, duration)
-    if ipv6:
-        iperf_args = iperf_args + '-6 '
-    if traffic_type.upper() == 'UDP':
-        iperf_args = iperf_args + '-u -b {} -l 1470 -P {} '.format(
-            udp_throughput, num_processes)
-    elif traffic_type.upper() == 'TCP':
-        iperf_args = iperf_args + '-P {} '.format(num_processes)
-    if socket_size:
-        iperf_args = iperf_args + '-w {} '.format(socket_size)
-    if reverse_direction:
-        iperf_args = iperf_args + ' -R'
-    return iperf_args
-
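For example (illustrative parameters), a 30 s, two-process UDP test in the reverse (download) direction formats as:

    # Usage sketch for get_iperf_arg_string.
    iperf_args = get_iperf_arg_string(duration=30,
                                      reverse_direction=True,
                                      traffic_type='UDP',
                                      num_processes=2,
                                      udp_throughput='100M')
    # -> '-i 1 -t 30 -J -u -b 100M -l 1470 -P 2  -R'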
-
-# Attenuator Utilities
-def atten_by_label(atten_list, path_label, atten_level):
-    """Attenuate signals according to their path label.
-
-    Args:
-        atten_list: list of attenuators to iterate over
-        path_label: path label on which to set desired attenuation
-        atten_level: attenuation desired on path
-    """
-    for atten in atten_list:
-        if path_label in atten.path:
-            atten.set_atten(atten_level, retry=True)
-
-
-def get_atten_for_target_rssi(target_rssi, attenuators, dut, ping_server):
-    """Function to estimate attenuation to hit a target RSSI.
-
-    This function estimates a constant attenuation setting on all attenuation
-    ports to hit a target RSSI. The estimate is not meant to be exact or
-    guaranteed.
-
-    Args:
-        target_rssi: rssi of interest
-        attenuators: list of attenuator ports
-        dut: android device object assumed connected to a wifi network.
-        ping_server: ssh connection object to ping server
-    Returns:
-        target_atten: attenuation setting to achieve target_rssi
-    """
-    logging.info('Searching attenuation for RSSI = {}dB'.format(target_rssi))
-    # Set attenuator to 0 dB
-    for atten in attenuators:
-        atten.set_atten(0, strict=False, retry=True)
-    # Start ping traffic
-    dut_ip = dut.droid.connectivityGetIPv4Addresses('wlan0')[0]
-    # Measure starting RSSI
-    ping_future = get_ping_stats_nb(src_device=ping_server,
-                                    dest_address=dut_ip,
-                                    ping_duration=1.5,
-                                    ping_interval=0.02,
-                                    ping_size=64)
-    current_rssi = get_connected_rssi(dut,
-                                      num_measurements=4,
-                                      polling_frequency=0.25,
-                                      first_measurement_delay=0.5,
-                                      disconnect_warning=1,
-                                      ignore_samples=1)
-    current_rssi = current_rssi['signal_poll_rssi']['mean']
-    ping_future.result()
-    target_atten = 0
-    logging.debug('RSSI @ {0:.2f}dB attenuation = {1:.2f}'.format(
-        target_atten, current_rssi))
-    within_range = 0
-    for idx in range(20):
-        atten_delta = max(min(current_rssi - target_rssi, 20), -20)
-        target_atten = int((target_atten + atten_delta) * 4) / 4
-        if target_atten < 0:
-            return 0
-        if target_atten > attenuators[0].get_max_atten():
-            return attenuators[0].get_max_atten()
-        for atten in attenuators:
-            atten.set_atten(target_atten, strict=False, retry=True)
-        ping_future = get_ping_stats_nb(src_device=ping_server,
-                                        dest_address=dut_ip,
-                                        ping_duration=1.5,
-                                        ping_interval=0.02,
-                                        ping_size=64)
-        current_rssi = get_connected_rssi(dut,
-                                          num_measurements=4,
-                                          polling_frequency=0.25,
-                                          first_measurement_delay=0.5,
-                                          disconnect_warning=1,
-                                          ignore_samples=1)
-        current_rssi = current_rssi['signal_poll_rssi']['mean']
-        ping_future.result()
-        logging.info('RSSI @ {0:.2f}dB attenuation = {1:.2f}'.format(
-            target_atten, current_rssi))
-        if abs(current_rssi - target_rssi) < 1:
-            if within_range:
-                logging.info(
-                    'Reached RSSI: {0:.2f}. Target RSSI: {1:.2f}.'
-                    'Attenuation: {2:.2f}, Iterations = {3:.2f}'.format(
-                        current_rssi, target_rssi, target_atten, idx))
-                return target_atten
-            else:
-                within_range = True
-        else:
-            within_range = False
-    return target_atten
-
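The search above clamps each correction to ±20 dB and rounds the running estimate to the attenuator's 0.25 dB resolution. A standalone sketch of that per-iteration update (the helper name is illustrative):

    def next_atten_estimate(target_atten, current_rssi, target_rssi):
        # Clamp the correction to +/-20 dB, then quantize to 0.25 dB steps.
        atten_delta = max(min(current_rssi - target_rssi, 20), -20)
        return int((target_atten + atten_delta) * 4) / 4

    assert next_atten_estimate(0, -45, -65) == 20.0
    assert next_atten_estimate(20, -58.4, -65) == 26.5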
-
-def get_current_atten_dut_chain_map(attenuators,
-                                    dut,
-                                    ping_server,
-                                    ping_from_dut=False):
-    """Function to detect mapping between attenuator ports and DUT chains.
-
-    This function detects the mapping between attenuator ports and DUT chains
-    in cases where DUT chains are connected to only one attenuator port. The
-    function assumes the DUT is already connected to a wifi network. The
-    function starts by measuring per chain RSSI at 0 attenuation, then
-    attenuates one port at a time looking for the chain that reports a lower
-    RSSI.
-
-    Args:
-        attenuators: list of attenuator ports
-        dut: android device object assumed connected to a wifi network.
-        ping_server: ssh connection object to ping server
-        ping_from_dut: boolean controlling whether to ping from or to dut
-    Returns:
-        chain_map: list of dut chains, one entry per attenuator port
-    """
-    # Set attenuator to 0 dB
-    for atten in attenuators:
-        atten.set_atten(0, strict=False, retry=True)
-    # Start ping traffic
-    dut_ip = dut.droid.connectivityGetIPv4Addresses('wlan0')[0]
-    if ping_from_dut:
-        ping_future = get_ping_stats_nb(dut, ping_server._settings.hostname,
-                                        11, 0.02, 64)
-    else:
-        ping_future = get_ping_stats_nb(ping_server, dut_ip, 11, 0.02, 64)
-    # Measure starting RSSI
-    base_rssi = get_connected_rssi(dut, 4, 0.25, 1)
-    chain0_base_rssi = base_rssi['chain_0_rssi']['mean']
-    chain1_base_rssi = base_rssi['chain_1_rssi']['mean']
-    if chain0_base_rssi < -70 or chain1_base_rssi < -70:
-        logging.warning('RSSI might be too low to get reliable chain map.')
-    # Compile chain map by attenuating one path at a time and seeing which
-    # chain's RSSI degrades
-    chain_map = []
-    for test_atten in attenuators:
-        # Set one attenuator to 30 dB down
-        test_atten.set_atten(30, strict=False, retry=True)
-        # Get new RSSI
-        test_rssi = get_connected_rssi(dut, 4, 0.25, 1)
-        # Assign attenuator to path that has lower RSSI
-        if chain0_base_rssi > -70 and chain0_base_rssi - test_rssi[
-                'chain_0_rssi']['mean'] > 10:
-            chain_map.append('DUT-Chain-0')
-        elif chain1_base_rssi > -70 and chain1_base_rssi - test_rssi[
-                'chain_1_rssi']['mean'] > 10:
-            chain_map.append('DUT-Chain-1')
-        else:
-            chain_map.append(None)
-        # Reset attenuator to 0
-        test_atten.set_atten(0, strict=False, retry=True)
-    ping_future.result()
-    logging.debug('Chain Map: {}'.format(chain_map))
-    return chain_map
-
-
-def get_full_rf_connection_map(attenuators,
-                               dut,
-                               ping_server,
-                               networks,
-                               ping_from_dut=False):
-    """Function to detect per-network connections between attenuator and DUT.
-
-    This function detects the mapping between attenuator ports and DUT chains
-    on all networks in its arguments. The function connects the DUT to each
-    network then calls get_current_atten_dut_chain_map to get the connection
-    map on the current network. The function outputs the results in two formats
-    to enable easy access when users are interested in indexing by network or
-    attenuator port.
-
-    Args:
-        attenuators: list of attenuator ports
-        dut: android device object assumed connected to a wifi network.
-        ping_server: ssh connection object to ping server
-        networks: dict of network IDs and configs
-    Returns:
-        rf_map_by_network: dict of RF connections indexed by network.
-        rf_map_by_atten: list of RF connections indexed by attenuator
-    """
-    for atten in attenuators:
-        atten.set_atten(0, strict=False, retry=True)
-
-    rf_map_by_network = collections.OrderedDict()
-    rf_map_by_atten = [[] for atten in attenuators]
-    for net_id, net_config in networks.items():
-        wutils.reset_wifi(dut)
-        wutils.wifi_connect(dut,
-                            net_config,
-                            num_of_tries=1,
-                            assert_on_fail=False,
-                            check_connectivity=False)
-        rf_map_by_network[net_id] = get_current_atten_dut_chain_map(
-            attenuators, dut, ping_server, ping_from_dut)
-        for idx, chain in enumerate(rf_map_by_network[net_id]):
-            if chain:
-                rf_map_by_atten[idx].append({
-                    'network': net_id,
-                    'dut_chain': chain
-                })
-    logging.debug('RF Map (by Network): {}'.format(rf_map_by_network))
-    logging.debug('RF Map (by Atten): {}'.format(rf_map_by_atten))
-
-    return rf_map_by_network, rf_map_by_atten
-
-
-# Generic device utils
-def get_dut_temperature(dut):
-    """Function to get dut temperature.
-
-    The function fetches and returns the reading from the temperature sensor
-    used for skin temperature and thermal throttling.
-
-    Args:
-        dut: AndroidDevice of interest
-    Returns:
-        temperature: device temperature. 0 if temperature could not be read
-    """
-    candidate_zones = [
-        '/sys/devices/virtual/thermal/tz-by-name/skin-therm/temp',
-        '/sys/devices/virtual/thermal/tz-by-name/sdm-therm-monitor/temp',
-        '/sys/devices/virtual/thermal/tz-by-name/sdm-therm-adc/temp',
-        '/sys/devices/virtual/thermal/tz-by-name/back_therm/temp',
-        '/dev/thermal/tz-by-name/quiet_therm/temp'
-    ]
-    for zone in candidate_zones:
-        try:
-            temperature = int(dut.adb.shell('cat {}'.format(zone)))
-            break
-        except:
-            temperature = 0
-    if temperature == 0:
-        logging.debug('Could not check DUT temperature.')
-    elif temperature > 100:
-        temperature = temperature / 1000
-    return temperature
-
-
-def wait_for_dut_cooldown(dut, target_temp=50, timeout=300):
-    """Function to wait for a DUT to cool down.
-
-    Args:
-        dut: AndroidDevice of interest
-        target_temp: target cooldown temperature
-        timeout: max time to wait for cooldown
-    """
-    start_time = time.time()
-    while time.time() - start_time < timeout:
-        temperature = get_dut_temperature(dut)
-        if temperature < target_temp:
-            break
-        time.sleep(SHORT_SLEEP)
-    elapsed_time = time.time() - start_time
-    logging.debug('DUT Final Temperature: {}C. Cooldown duration: {}'.format(
-        temperature, elapsed_time))
-
-
-def health_check(dut, batt_thresh=5, temp_threshold=53, cooldown=1):
-    """Function to check health status of a DUT.
-
-    The function checks both battery levels and temperature to avoid DUT
-    powering off during the test.
-
-    Args:
-        dut: AndroidDevice of interest
-        batt_thresh: battery level threshold
-        temp_threshold: temperature threshold
-        cooldown: flag to wait for DUT to cool down when overheating
-    Returns:
-        health_check: boolean confirming device is healthy
-    """
-    health_check = True
-    battery_level = utils.get_battery_level(dut)
-    if battery_level < batt_thresh:
-        logging.warning('Battery level low ({}%)'.format(battery_level))
-        health_check = False
-    else:
-        logging.debug('Battery level = {}%'.format(battery_level))
-
-    temperature = get_dut_temperature(dut)
-    if temperature > temp_threshold:
-        if cooldown:
-            logging.warning(
-                'Waiting for DUT to cooldown. ({} C)'.format(temperature))
-            wait_for_dut_cooldown(dut, target_temp=temp_threshold - 5)
-        else:
-            logging.warning('DUT Overheating ({} C)'.format(temperature))
-            health_check = False
-    else:
-        logging.debug('DUT Temperature = {} C'.format(temperature))
-    return health_check
-
-
-# Wifi Device utils
-def detect_wifi_platform(dut):
-    ini_check = len(dut.get_file_names('/vendor/firmware/wlan/qca_cld/'))
-    if ini_check:
-        wifi_platform = 'qcom'
-    else:
-        wifi_platform = 'brcm'
-    return wifi_platform
-
-
-def detect_wifi_decorator(f):
-    def wrap(*args, **kwargs):
-        if 'dut' in kwargs:
-            dut = kwargs['dut']
-        else:
-            dut = next(arg for arg in args if type(arg) == AndroidDevice)
-        f_decorated = '{}_{}'.format(f.__name__, detect_wifi_platform(dut))
-        f_decorated = globals()[f_decorated]
-        return (f_decorated(*args, **kwargs))
-
-    return wrap
-
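To make the dispatch concrete: calling a decorated stub such as get_connected_rssi(dut) looks up 'get_connected_rssi_' + detect_wifi_platform(dut) in this module's globals(). Assuming dut is a connected AndroidDevice on a Broadcom platform, the call is equivalent to the manual dispatch below (illustration only):

    # Equivalent manual dispatch for a Broadcom DUT.
    platform = detect_wifi_platform(dut)      # 'brcm'
    impl = globals()['get_connected_rssi_{}'.format(platform)]
    connected_rssi = impl(dut, num_measurements=4)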
-
-# Rssi Utilities
-def empty_rssi_result():
-    return collections.OrderedDict([('data', []), ('mean', None),
-                                    ('stdev', None)])
-
-
-@detect_wifi_decorator
-def get_connected_rssi(dut,
-                       num_measurements=1,
-                       polling_frequency=SHORT_SLEEP,
-                       first_measurement_delay=0,
-                       disconnect_warning=True,
-                       ignore_samples=0,
-                       interface=None):
-    """Gets all RSSI values reported for the connected access point/BSSID.
-
-    Args:
-        dut: android device object from which to get RSSI
-        num_measurements: number of scans done, and RSSIs collected
-        polling_frequency: time to wait between RSSI measurements
-        disconnect_warning: boolean controlling disconnection logging messages
-        ignore_samples: number of leading samples to ignore
-    Returns:
-        connected_rssi: dict containing the measurement results for
-        all reported RSSI values (signal_poll, per chain, etc.) and their
-        statistics
-    """
-    pass
-
-
-@nonblocking
-def get_connected_rssi_nb(dut,
-                          num_measurements=1,
-                          polling_frequency=SHORT_SLEEP,
-                          first_measurement_delay=0,
-                          disconnect_warning=True,
-                          ignore_samples=0,
-                          interface=None):
-    return get_connected_rssi(dut, num_measurements, polling_frequency,
-                              first_measurement_delay, disconnect_warning,
-                              ignore_samples, interface)
-
-
-def get_connected_rssi_qcom(dut,
-                            num_measurements=1,
-                            polling_frequency=SHORT_SLEEP,
-                            first_measurement_delay=0,
-                            disconnect_warning=True,
-                            ignore_samples=0,
-                            interface=None):
-    # yapf: disable
-    connected_rssi = collections.OrderedDict(
-        [('time_stamp', []),
-         ('bssid', []), ('ssid', []), ('frequency', []),
-         ('signal_poll_rssi', empty_rssi_result()),
-         ('signal_poll_avg_rssi', empty_rssi_result()),
-         ('chain_0_rssi', empty_rssi_result()),
-         ('chain_1_rssi', empty_rssi_result())])
-    # yapf: enable
-    previous_bssid = 'disconnected'
-    t0 = time.time()
-    time.sleep(first_measurement_delay)
-    for idx in range(num_measurements):
-        measurement_start_time = time.time()
-        connected_rssi['time_stamp'].append(measurement_start_time - t0)
-        # Get signal poll RSSI
-        try:
-            if interface is None:
-                status_output = dut.adb.shell(WPA_CLI_STATUS)
-            else:
-                status_output = dut.adb.shell(
-                    'wpa_cli -i {} status'.format(interface))
-        except:
-            status_output = ''
-        match = re.search('bssid=.*', status_output)
-        if match:
-            current_bssid = match.group(0).split('=')[1]
-            connected_rssi['bssid'].append(current_bssid)
-        else:
-            current_bssid = 'disconnected'
-            connected_rssi['bssid'].append(current_bssid)
-            if disconnect_warning and previous_bssid != 'disconnected':
-                logging.warning('WIFI DISCONNECT DETECTED!')
-        previous_bssid = current_bssid
-        match = re.search('\s+ssid=.*', status_output)
-        if match:
-            ssid = match.group(0).split('=')[1]
-            connected_rssi['ssid'].append(ssid)
-        else:
-            connected_rssi['ssid'].append('disconnected')
-        try:
-            if interface is None:
-                signal_poll_output = dut.adb.shell(SIGNAL_POLL)
-            else:
-                signal_poll_output = dut.adb.shell(
-                    'wpa_cli -i {} signal_poll'.format(interface))
-        except:
-            signal_poll_output = ''
-        match = re.search('FREQUENCY=.*', signal_poll_output)
-        if match:
-            frequency = int(match.group(0).split('=')[1])
-            connected_rssi['frequency'].append(frequency)
-        else:
-            connected_rssi['frequency'].append(RSSI_ERROR_VAL)
-        match = re.search('RSSI=.*', signal_poll_output)
-        if match:
-            temp_rssi = int(match.group(0).split('=')[1])
-            if temp_rssi == -9999 or temp_rssi == 0:
-                connected_rssi['signal_poll_rssi']['data'].append(
-                    RSSI_ERROR_VAL)
-            else:
-                connected_rssi['signal_poll_rssi']['data'].append(temp_rssi)
-        else:
-            connected_rssi['signal_poll_rssi']['data'].append(RSSI_ERROR_VAL)
-        match = re.search('AVG_RSSI=.*', signal_poll_output)
-        if match:
-            connected_rssi['signal_poll_avg_rssi']['data'].append(
-                int(match.group(0).split('=')[1]))
-        else:
-            connected_rssi['signal_poll_avg_rssi']['data'].append(
-                RSSI_ERROR_VAL)
-
-        # Get per chain RSSI
-        try:
-            if interface is None:
-                per_chain_rssi = dut.adb.shell(STATION_DUMP)
-            else:
-                per_chain_rssi = ''
-        except:
-            per_chain_rssi = ''
-        match = re.search('.*signal avg:.*', per_chain_rssi)
-        if match:
-            per_chain_rssi = per_chain_rssi[per_chain_rssi.find('[') +
-                                            1:per_chain_rssi.find(']')]
-            per_chain_rssi = per_chain_rssi.split(', ')
-            connected_rssi['chain_0_rssi']['data'].append(
-                int(per_chain_rssi[0]))
-            connected_rssi['chain_1_rssi']['data'].append(
-                int(per_chain_rssi[1]))
-        else:
-            connected_rssi['chain_0_rssi']['data'].append(RSSI_ERROR_VAL)
-            connected_rssi['chain_1_rssi']['data'].append(RSSI_ERROR_VAL)
-        measurement_elapsed_time = time.time() - measurement_start_time
-        time.sleep(max(0, polling_frequency - measurement_elapsed_time))
-
-    # Compute mean RSSIs. Only average valid readings.
-    # Output RSSI_ERROR_VAL if no valid connected readings found.
-    for key, val in connected_rssi.copy().items():
-        if 'data' not in val:
-            continue
-        filtered_rssi_values = [x for x in val['data'] if not math.isnan(x)]
-        if len(filtered_rssi_values) > ignore_samples:
-            filtered_rssi_values = filtered_rssi_values[ignore_samples:]
-        if filtered_rssi_values:
-            connected_rssi[key]['mean'] = statistics.mean(filtered_rssi_values)
-            if len(filtered_rssi_values) > 1:
-                connected_rssi[key]['stdev'] = statistics.stdev(
-                    filtered_rssi_values)
-            else:
-                connected_rssi[key]['stdev'] = 0
-        else:
-            connected_rssi[key]['mean'] = RSSI_ERROR_VAL
-            connected_rssi[key]['stdev'] = RSSI_ERROR_VAL
-    return connected_rssi
-
-
-def get_connected_rssi_brcm(dut,
-                            num_measurements=1,
-                            polling_frequency=SHORT_SLEEP,
-                            first_measurement_delay=0,
-                            disconnect_warning=True,
-                            ignore_samples=0,
-                            interface=None):
-    # yapf: disable
-    connected_rssi = collections.OrderedDict(
-        [('time_stamp', []),
-         ('bssid', []), ('ssid', []), ('frequency', []),
-         ('signal_poll_rssi', empty_rssi_result()),
-         ('signal_poll_avg_rssi', empty_rssi_result()),
-         ('chain_0_rssi', empty_rssi_result()),
-         ('chain_1_rssi', empty_rssi_result())])
-
-    # yapf: enable
-    previous_bssid = 'disconnected'
-    t0 = time.time()
-    time.sleep(first_measurement_delay)
-    for idx in range(num_measurements):
-        measurement_start_time = time.time()
-        connected_rssi['time_stamp'].append(measurement_start_time - t0)
-        # Get signal poll RSSI
-        status_output = dut.adb.shell('wl assoc')
-        match = re.search('BSSID:.*', status_output)
-
-        if match:
-            current_bssid = match.group(0).split('\t')[0]
-            current_bssid = current_bssid.split(' ')[1]
-            connected_rssi['bssid'].append(current_bssid)
-
-        else:
-            current_bssid = 'disconnected'
-            connected_rssi['bssid'].append(current_bssid)
-            if disconnect_warning and previous_bssid != 'disconnected':
-                logging.warning('WIFI DISCONNECT DETECTED!')
-
-        previous_bssid = current_bssid
-        match = re.search('SSID:.*', status_output)
-        if match:
-            ssid = match.group(0).split(': ')[1]
-            connected_rssi['ssid'].append(ssid)
-        else:
-            connected_rssi['ssid'].append('disconnected')
-
-        #TODO: SEARCH MAP ; PICK CENTER CHANNEL
-        match = re.search('Primary channel:.*', status_output)
-        if match:
-            frequency = int(match.group(0).split(':')[1])
-            connected_rssi['frequency'].append(frequency)
-        else:
-            connected_rssi['frequency'].append(RSSI_ERROR_VAL)
-
-        try:
-            per_chain_rssi = dut.adb.shell('wl phy_rssi_ant')
-        except:
-            per_chain_rssi = DISCONNECTION_MESSAGE_BRCM
-        if DISCONNECTION_MESSAGE_BRCM not in per_chain_rssi:
-            per_chain_rssi = per_chain_rssi.split(' ')
-            chain_0_rssi = int(per_chain_rssi[1])
-            chain_1_rssi = int(per_chain_rssi[4])
-            connected_rssi['chain_0_rssi']['data'].append(chain_0_rssi)
-            connected_rssi['chain_1_rssi']['data'].append(chain_1_rssi)
-            combined_rssi = math.pow(10, chain_0_rssi / 10) + math.pow(
-                10, chain_1_rssi / 10)
-            combined_rssi = 10 * math.log10(combined_rssi)
-            connected_rssi['signal_poll_rssi']['data'].append(combined_rssi)
-            connected_rssi['signal_poll_avg_rssi']['data'].append(
-                combined_rssi)
-        else:
-            connected_rssi['chain_0_rssi']['data'].append(RSSI_ERROR_VAL)
-            connected_rssi['chain_1_rssi']['data'].append(RSSI_ERROR_VAL)
-            connected_rssi['signal_poll_rssi']['data'].append(RSSI_ERROR_VAL)
-            connected_rssi['signal_poll_avg_rssi']['data'].append(
-                RSSI_ERROR_VAL)
-        measurement_elapsed_time = time.time() - measurement_start_time
-        time.sleep(max(0, polling_frequency - measurement_elapsed_time))
-
-    # Compute mean RSSIs. Only average valid readings.
-    # Output RSSI_ERROR_VAL if no valid connected readings found.
-    for key, val in connected_rssi.copy().items():
-        if 'data' not in val:
-            continue
-        filtered_rssi_values = [x for x in val['data'] if not math.isnan(x)]
-        if len(filtered_rssi_values) > ignore_samples:
-            filtered_rssi_values = filtered_rssi_values[ignore_samples:]
-        if filtered_rssi_values:
-            connected_rssi[key]['mean'] = statistics.mean(filtered_rssi_values)
-            if len(filtered_rssi_values) > 1:
-                connected_rssi[key]['stdev'] = statistics.stdev(
-                    filtered_rssi_values)
-            else:
-                connected_rssi[key]['stdev'] = 0
-        else:
-            connected_rssi[key]['mean'] = RSSI_ERROR_VAL
-            connected_rssi[key]['stdev'] = RSSI_ERROR_VAL
-
-    return connected_rssi
-
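The per-chain readings above are combined as a power sum, combined = 10*log10(10^(c0/10) + 10^(c1/10)), rather than a simple average. A worked example with illustrative values:

    import math

    # Power-sum combination used in get_connected_rssi_brcm.
    chain_0_rssi, chain_1_rssi = -50, -53
    combined = 10 * math.log10(10**(chain_0_rssi / 10) + 10**(chain_1_rssi / 10))
    print(round(combined, 1))  # -48.2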
-
-@detect_wifi_decorator
-def get_scan_rssi(dut, tracked_bssids, num_measurements=1):
-    """Gets scan RSSI for specified BSSIDs.
-
-    Args:
-        dut: android device object from which to get RSSI
-        tracked_bssids: array of BSSIDs to gather RSSI data for
-        num_measurements: number of scans done, and RSSIs collected
-    Returns:
-        scan_rssi: dict containing the measurement results as well as the
-        statistics of the scan RSSI for all BSSIDs in tracked_bssids
-    """
-    pass
-
-
-@nonblocking
-def get_scan_rssi_nb(dut, tracked_bssids, num_measurements=1):
-    return get_scan_rssi(dut, tracked_bssids, num_measurements)
-
-
-def get_scan_rssi_qcom(dut, tracked_bssids, num_measurements=1):
-    scan_rssi = collections.OrderedDict()
-    for bssid in tracked_bssids:
-        scan_rssi[bssid] = empty_rssi_result()
-    for idx in range(num_measurements):
-        scan_output = dut.adb.shell(SCAN)
-        time.sleep(MED_SLEEP)
-        scan_output = dut.adb.shell(SCAN_RESULTS)
-        for bssid in tracked_bssids:
-            bssid_result = re.search(bssid + '.*',
-                                     scan_output,
-                                     flags=re.IGNORECASE)
-            if bssid_result:
-                bssid_result = bssid_result.group(0).split('\t')
-                scan_rssi[bssid]['data'].append(int(bssid_result[2]))
-            else:
-                scan_rssi[bssid]['data'].append(RSSI_ERROR_VAL)
-    # Compute mean RSSIs. Only average valid readings.
-    # Output RSSI_ERROR_VAL if no readings found.
-    for key, val in scan_rssi.items():
-        filtered_rssi_values = [x for x in val['data'] if not math.isnan(x)]
-        if filtered_rssi_values:
-            scan_rssi[key]['mean'] = statistics.mean(filtered_rssi_values)
-            if len(filtered_rssi_values) > 1:
-                scan_rssi[key]['stdev'] = statistics.stdev(
-                    filtered_rssi_values)
-            else:
-                scan_rssi[key]['stdev'] = 0
-        else:
-            scan_rssi[key]['mean'] = RSSI_ERROR_VAL
-            scan_rssi[key]['stdev'] = RSSI_ERROR_VAL
-    return scan_rssi
-
-
-def get_scan_rssi_brcm(dut, tracked_bssids, num_measurements=1):
-    scan_rssi = collections.OrderedDict()
-    for bssid in tracked_bssids:
-        scan_rssi[bssid] = empty_rssi_result()
-    for idx in range(num_measurements):
-        scan_output = dut.adb.shell('cmd wifi start-scan')
-        time.sleep(MED_SLEEP)
-        scan_output = dut.adb.shell('cmd wifi list-scan-results')
-        for bssid in tracked_bssids:
-            bssid_result = re.search(bssid + '.*',
-                                     scan_output,
-                                     flags=re.IGNORECASE)
-            if bssid_result:
-                bssid_result = bssid_result.group(0).split()
-                print(bssid_result)
-                scan_rssi[bssid]['data'].append(int(bssid_result[2]))
-            else:
-                scan_rssi[bssid]['data'].append(RSSI_ERROR_VAL)
-    # Compute mean RSSIs. Only average valid readings.
-    # Output RSSI_ERROR_VAL if no readings found.
-    for key, val in scan_rssi.items():
-        filtered_rssi_values = [x for x in val['data'] if not math.isnan(x)]
-        if filtered_rssi_values:
-            scan_rssi[key]['mean'] = statistics.mean(filtered_rssi_values)
-            if len(filtered_rssi_values) > 1:
-                scan_rssi[key]['stdev'] = statistics.stdev(
-                    filtered_rssi_values)
-            else:
-                scan_rssi[key]['stdev'] = 0
-        else:
-            scan_rssi[key]['mean'] = RSSI_ERROR_VAL
-            scan_rssi[key]['stdev'] = RSSI_ERROR_VAL
-    return scan_rssi
-
-
-@detect_wifi_decorator
-def get_sw_signature(dut):
-    """Function that checks the signature for wifi firmware and config files.
-
-    Returns:
-        signature: dict containing config_signature (last three digits of the
-            bdf cksums), fw_signature (firmware version as a float, i.e.,
-            major.minor), and serial_hash (hash of the DUT serial number)
-    """
-    pass
-
-
-def get_sw_signature_qcom(dut):
-    bdf_output = dut.adb.shell('cksum /vendor/firmware/bdwlan*')
-    logging.debug('BDF Checksum output: {}'.format(bdf_output))
-    bdf_signature = sum(
-        [int(line.split(' ')[0]) for line in bdf_output.splitlines()]) % 1000
-
-    fw_output = dut.adb.shell('halutil -logger -get fw')
-    logging.debug('Firmware version output: {}'.format(fw_output))
-    fw_version = re.search(FW_REGEX, fw_output).group('firmware')
-    fw_signature = fw_version.split('.')[-3:-1]
-    fw_signature = float('.'.join(fw_signature))
-    serial_hash = int(hashlib.md5(dut.serial.encode()).hexdigest(), 16) % 1000
-    return {
-        'config_signature': bdf_signature,
-        'fw_signature': fw_signature,
-        'serial_hash': serial_hash
-    }
-
-
-def get_sw_signature_brcm(dut):
-    bdf_output = dut.adb.shell('cksum /vendor/etc/wifi/bcmdhd*')
-    logging.debug('BDF Checksum output: {}'.format(bdf_output))
-    bdf_signature = sum(
-        [int(line.split(' ')[0]) for line in bdf_output.splitlines()]) % 1000
-
-    fw_output = dut.adb.shell('getprop vendor.wlan.firmware.version')
-    logging.debug('Firmware version output: {}'.format(fw_output))
-    fw_version = fw_output.split('.')[-1]
-    driver_output = dut.adb.shell('getprop vendor.wlan.driver.version')
-    driver_version = driver_output.split('.')[-1]
-    fw_signature = float('{}.{}'.format(fw_version, driver_version))
-    serial_hash = int(hashlib.md5(dut.serial.encode()).hexdigest(), 16) % 1000
-    return {
-        'config_signature': bdf_signature,
-        'fw_signature': fw_signature,
-        'serial_hash': serial_hash
-    }
-
-
-@detect_wifi_decorator
-def push_config(dut, config_file):
-    """Function to push Wifi BDF files
-
-    This function checks for existing wifi bdf files and over writes them all,
-    for simplicity, with the bdf file provided in the arguments. The dut is
-    rebooted for the bdf file to take effect
-
-    Args:
-        dut: dut to push bdf file to
-        config_file: path to bdf_file to push
-    """
-    pass
-
-
-def push_config_qcom(dut, config_file):
-    config_files_list = dut.adb.shell(
-        'ls /vendor/firmware/bdwlan*').splitlines()
-    for dst_file in config_files_list:
-        dut.push_system_file(config_file, dst_file)
-    dut.reboot()
-
-
-def push_config_brcm(dut, config_file):
-    config_files_list = dut.adb.shell('ls /vendor/etc/*.cal').splitlines()
-    for dst_file in config_files_list:
-        dut.push_system_file(config_file, dst_file)
-    dut.reboot()
-
-
-def push_firmware(dut, firmware_files):
-    """Function to push Wifi firmware files
-
-    Args:
-        dut: dut to push firmware files to
-        firmware_files: list of firmware file paths, each pushed to
-            /vendor/firmware/
-    """
-    for file in firmware_files:
-        dut.push_system_file(file, '/vendor/firmware/')
-    dut.reboot()
-
-
-@detect_wifi_decorator
-def start_wifi_logging(dut):
-    """Function to start collecting wifi-related logs"""
-    pass
-
-
-def start_wifi_logging_qcom(dut):
-    dut.droid.wifiEnableVerboseLogging(1)
-    msg = "Failed to enable WiFi verbose logging."
-    asserts.assert_equal(dut.droid.wifiGetVerboseLoggingLevel(), 1, msg)
-    logging.info('Starting CNSS logs')
-    dut.adb.shell("find /data/vendor/wifi/wlan_logs/ -type f -delete",
-                  ignore_status=True)
-    dut.adb.shell_nb('cnss_diag -f -s')
-
-
-def start_wifi_logging_brcm(dut):
-    pass
-
-
-@detect_wifi_decorator
-def stop_wifi_logging(dut):
-    """Function to start collecting wifi-related logs"""
-    pass
-
-
-def stop_wifi_logging_qcom(dut):
-    logging.info('Stopping CNSS logs')
-    dut.adb.shell('killall cnss_diag')
-    logs = dut.get_file_names("/data/vendor/wifi/wlan_logs/")
-    if logs:
-        dut.log.info("Pulling cnss_diag logs %s", logs)
-        log_path = os.path.join(dut.device_log_path,
-                                "CNSS_DIAG_%s" % dut.serial)
-        os.makedirs(log_path, exist_ok=True)
-        dut.pull_files(logs, log_path)
-
-
-def stop_wifi_logging_brcm(dut):
-    pass
-
-
-def _set_ini_fields(ini_file_path, ini_field_dict):
-    template_regex = r'^{}=[0-9,.x-]+'
-    with open(ini_file_path, 'r') as f:
-        ini_lines = f.read().splitlines()
-        for idx, line in enumerate(ini_lines):
-            for field_name, field_value in ini_field_dict.items():
-                line_regex = re.compile(template_regex.format(field_name))
-                if re.match(line_regex, line):
-                    ini_lines[idx] = '{}={}'.format(field_name, field_value)
-                    print(ini_lines[idx])
-    with open(ini_file_path, 'w') as f:
-        f.write('\n'.join(ini_lines) + '\n')
-
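A small illustration (synthetic file contents and path): _set_ini_fields rewrites only the lines whose key matches a requested field and leaves everything else untouched.

    # Sketch: rewrite gEnable2x2 in a synthetic ini file at a temporary path.
    with open('/tmp/example_cfg.ini', 'w') as f:
        f.write('gEnable2x2=2\ngDot11Mode=0\n')
    _set_ini_fields('/tmp/example_cfg.ini', {'gEnable2x2': 0})
    # File now contains 'gEnable2x2=0' while 'gDot11Mode=0' is unchanged.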
-
-def _edit_dut_ini(dut, ini_fields):
-    """Function to edit Wifi ini files."""
-    dut_ini_path = '/vendor/firmware/wlan/qca_cld/WCNSS_qcom_cfg.ini'
-    local_ini_path = os.path.expanduser('~/WCNSS_qcom_cfg.ini')
-    dut.pull_files(dut_ini_path, local_ini_path)
-
-    _set_ini_fields(local_ini_path, ini_fields)
-
-    dut.push_system_file(local_ini_path, dut_ini_path)
-    dut.reboot()
-
-
-def set_ini_single_chain_mode(dut, chain):
-    ini_fields = {
-        'gEnable2x2': 0,
-        'gSetTxChainmask1x1': chain + 1,
-        'gSetRxChainmask1x1': chain + 1,
-        'gDualMacFeatureDisable': 1,
-        'gDot11Mode': 0
-    }
-    _edit_dut_ini(dut, ini_fields)
-
-
-def set_ini_two_chain_mode(dut):
-    ini_fields = {
-        'gEnable2x2': 2,
-        'gSetTxChainmask1x1': 1,
-        'gSetRxChainmask1x1': 1,
-        'gDualMacFeatureDisable': 6,
-        'gDot11Mode': 0
-    }
-    _edit_dut_ini(dut, ini_fields)
-
-
-def set_ini_tx_mode(dut, mode):
-    TX_MODE_DICT = {
-        'Auto': 0,
-        '11n': 4,
-        '11ac': 9,
-        '11abg': 1,
-        '11b': 2,
-        '11': 3,
-        '11g only': 5,
-        '11n only': 6,
-        '11b only': 7,
-        '11ac only': 8
-    }
-
-    ini_fields = {
-        'gEnable2x2': 2,
-        'gSetTxChainmask1x1': 1,
-        'gSetRxChainmask1x1': 1,
-        'gDualMacFeatureDisable': 6,
-        'gDot11Mode': TX_MODE_DICT[mode]
-    }
-    _edit_dut_ini(dut, ini_fields)
-
-
-# Link layer stats utilities
-class LinkLayerStats():
-    def __new__(cls, dut, llstats_enabled=True):
-        if detect_wifi_platform(dut) == 'qcom':
-            return LinkLayerStatsQcom(dut, llstats_enabled=llstats_enabled)
-        else:
-            return LinkLayerStatsBrcm(dut, llstats_enabled=llstats_enabled)
-
-
-class LinkLayerStatsQcom():
-
-    LLSTATS_CMD = 'cat /d/wlan0/ll_stats'
-    PEER_REGEX = 'LL_STATS_PEER_ALL'
-    MCS_REGEX = re.compile(
-        r'preamble: (?P<mode>\S+), nss: (?P<num_streams>\S+), bw: (?P<bw>\S+), '
-        'mcs: (?P<mcs>\S+), bitrate: (?P<rate>\S+), txmpdu: (?P<txmpdu>\S+), '
-        'rxmpdu: (?P<rxmpdu>\S+), mpdu_lost: (?P<mpdu_lost>\S+), '
-        'retries: (?P<retries>\S+), retries_short: (?P<retries_short>\S+), '
-        'retries_long: (?P<retries_long>\S+)')
-    MCS_ID = collections.namedtuple(
-        'mcs_id', ['mode', 'num_streams', 'bandwidth', 'mcs', 'rate'])
-    MODE_MAP = {'0': '11a/g', '1': '11b', '2': '11n', '3': '11ac'}
-    BW_MAP = {'0': 20, '1': 40, '2': 80}
-
-    def __init__(self, dut, llstats_enabled=True):
-        self.dut = dut
-        self.llstats_enabled = llstats_enabled
-        self.llstats_cumulative = self._empty_llstats()
-        self.llstats_incremental = self._empty_llstats()
-
-    def update_stats(self):
-        if self.llstats_enabled:
-            try:
-                llstats_output = self.dut.adb.shell(self.LLSTATS_CMD,
-                                                    timeout=0.1)
-            except:
-                llstats_output = ''
-        else:
-            llstats_output = ''
-        self._update_stats(llstats_output)
-
-    def reset_stats(self):
-        self.llstats_cumulative = self._empty_llstats()
-        self.llstats_incremental = self._empty_llstats()
-
-    def _empty_llstats(self):
-        return collections.OrderedDict(mcs_stats=collections.OrderedDict(),
-                                       summary=collections.OrderedDict())
-
-    def _empty_mcs_stat(self):
-        return collections.OrderedDict(txmpdu=0,
-                                       rxmpdu=0,
-                                       mpdu_lost=0,
-                                       retries=0,
-                                       retries_short=0,
-                                       retries_long=0)
-
-    def _mcs_id_to_string(self, mcs_id):
-        mcs_string = '{} {}MHz Nss{} MCS{} {}Mbps'.format(
-            mcs_id.mode, mcs_id.bandwidth, mcs_id.num_streams, mcs_id.mcs,
-            mcs_id.rate)
-        return mcs_string
-
-    def _parse_mcs_stats(self, llstats_output):
-        llstats_dict = {}
-        # Look for per-peer stats
-        match = re.search(self.PEER_REGEX, llstats_output)
-        if not match:
-            self.reset_stats()
-            return collections.OrderedDict()
-        # Find and process all matches for per stream stats
-        match_iter = re.finditer(self.MCS_REGEX, llstats_output)
-        for match in match_iter:
-            current_mcs = self.MCS_ID(self.MODE_MAP[match.group('mode')],
-                                      int(match.group('num_streams')) + 1,
-                                      self.BW_MAP[match.group('bw')],
-                                      int(match.group('mcs')),
-                                      int(match.group('rate'), 16) / 1000)
-            current_stats = collections.OrderedDict(
-                txmpdu=int(match.group('txmpdu')),
-                rxmpdu=int(match.group('rxmpdu')),
-                mpdu_lost=int(match.group('mpdu_lost')),
-                retries=int(match.group('retries')),
-                retries_short=int(match.group('retries_short')),
-                retries_long=int(match.group('retries_long')))
-            llstats_dict[self._mcs_id_to_string(current_mcs)] = current_stats
-        return llstats_dict
-
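For illustration, a synthetic line shaped to match MCS_REGEX (the real /d/wlan0/ll_stats formatting may differ) parses as follows: 'preamble' maps through MODE_MAP, nss is reported zero-based, bw maps through BW_MAP, and the bitrate field is read as hex and scaled to Mbps.

    # Synthetic example line; not captured from a device.
    line = ('preamble: 3, nss: 1, bw: 2, mcs: 9, bitrate: 0x6ADE8, txmpdu: 120, '
            'rxmpdu: 80, mpdu_lost: 2, retries: 5, retries_short: 3, '
            'retries_long: 2')
    match = re.search(LinkLayerStatsQcom.MCS_REGEX, line)
    # mode '3' -> '11ac', nss 1 -> 2 streams, bw '2' -> 80 MHz,
    # rate 0x6ADE8 -> 437736 / 1000 = 437.736 Mbps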
-    def _diff_mcs_stats(self, new_stats, old_stats):
-        stats_diff = collections.OrderedDict()
-        for stat_key in new_stats.keys():
-            stats_diff[stat_key] = new_stats[stat_key] - old_stats[stat_key]
-        return stats_diff
-
-    def _generate_stats_summary(self, llstats_dict):
-        llstats_summary = collections.OrderedDict(common_tx_mcs=None,
-                                                  common_tx_mcs_count=0,
-                                                  common_tx_mcs_freq=0,
-                                                  common_rx_mcs=None,
-                                                  common_rx_mcs_count=0,
-                                                  common_rx_mcs_freq=0)
-        txmpdu_count = 0
-        rxmpdu_count = 0
-        for mcs_id, mcs_stats in llstats_dict['mcs_stats'].items():
-            if mcs_stats['txmpdu'] > llstats_summary['common_tx_mcs_count']:
-                llstats_summary['common_tx_mcs'] = mcs_id
-                llstats_summary['common_tx_mcs_count'] = mcs_stats['txmpdu']
-            if mcs_stats['rxmpdu'] > llstats_summary['common_rx_mcs_count']:
-                llstats_summary['common_rx_mcs'] = mcs_id
-                llstats_summary['common_rx_mcs_count'] = mcs_stats['rxmpdu']
-            txmpdu_count += mcs_stats['txmpdu']
-            rxmpdu_count += mcs_stats['rxmpdu']
-        if txmpdu_count:
-            llstats_summary['common_tx_mcs_freq'] = (
-                llstats_summary['common_tx_mcs_count'] / txmpdu_count)
-        if rxmpdu_count:
-            llstats_summary['common_rx_mcs_freq'] = (
-                llstats_summary['common_rx_mcs_count'] / rxmpdu_count)
-        return llstats_summary
-
-    def _update_stats(self, llstats_output):
-        # Parse stats
-        new_llstats = self._empty_llstats()
-        new_llstats['mcs_stats'] = self._parse_mcs_stats(llstats_output)
-        # Save old stats and set new cumulative stats
-        old_llstats = self.llstats_cumulative.copy()
-        self.llstats_cumulative = new_llstats.copy()
-        # Compute difference between new and old stats
-        self.llstats_incremental = self._empty_llstats()
-        for mcs_id, new_mcs_stats in new_llstats['mcs_stats'].items():
-            old_mcs_stats = old_llstats['mcs_stats'].get(
-                mcs_id, self._empty_mcs_stat())
-            self.llstats_incremental['mcs_stats'][
-                mcs_id] = self._diff_mcs_stats(new_mcs_stats, old_mcs_stats)
-        # Generate llstats summary
-        self.llstats_incremental['summary'] = self._generate_stats_summary(
-            self.llstats_incremental)
-        self.llstats_cumulative['summary'] = self._generate_stats_summary(
-            self.llstats_cumulative)
-
-
-class LinkLayerStatsBrcm():
-    def __init__(self, dut, llstats_enabled=True):
-        self.dut = dut
-        self.llstats_enabled = llstats_enabled
-        self.llstats_incremental = self._empty_llstats()
-        self.llstats_cumulative = self.llstats_incremental
-
-    def _empty_llstats(self):
-        return collections.OrderedDict(mcs_stats=collections.OrderedDict(),
-                                       summary=collections.OrderedDict())
-
-    def update_stats(self):
-        self.llstats_incremental = self._empty_llstats()
-        self.llstats_incremental['summary'] = collections.OrderedDict(
-            common_tx_mcs=None,
-            common_tx_mcs_count=1,
-            common_tx_mcs_freq=1,
-            common_rx_mcs=None,
-            common_rx_mcs_count=1,
-            common_rx_mcs_freq=1)
-        if self.llstats_enabled:
-            try:
-                rate_info = self.dut.adb.shell('wl rate_info', timeout=0.1)
-                self.llstats_incremental['summary'][
-                    'common_tx_mcs'] = '{} Mbps'.format(
-                        re.findall('\[Tx\]:'
-                                   ' (\d+[.]*\d* Mbps)', rate_info))
-                self.llstats_incremental['summary'][
-                    'common_rx_mcs'] = '{} Mbps'.format(
-                        re.findall('\[Rx\]:'
-                                   ' (\d+[.]*\d* Mbps)', rate_info))
-            except:
-                pass
diff --git a/acts_tests/acts_contrib/test_utils/wifi/wifi_performance_test_utils/__init__.py b/acts_tests/acts_contrib/test_utils/wifi/wifi_performance_test_utils/__init__.py
new file mode 100644
index 0000000..4e32f1a
--- /dev/null
+++ b/acts_tests/acts_contrib/test_utils/wifi/wifi_performance_test_utils/__init__.py
@@ -0,0 +1,747 @@
+#!/usr/bin/env python3.4
+#
+#   Copyright 2019 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the 'License');
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an 'AS IS' BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+import collections
+import importlib
+import ipaddress
+import logging
+import numpy
+import re
+import time
+from acts import asserts
+from acts import utils
+from acts.controllers.android_device import AndroidDevice
+from acts.controllers.utils_lib import ssh
+from acts_contrib.test_utils.wifi import wifi_test_utils as wutils
+from acts_contrib.test_utils.wifi.wifi_performance_test_utils import ping_utils
+from acts_contrib.test_utils.wifi.wifi_performance_test_utils import qcom_utils
+from acts_contrib.test_utils.wifi.wifi_performance_test_utils import brcm_utils
+
+from concurrent.futures import ThreadPoolExecutor
+
+SHORT_SLEEP = 1
+MED_SLEEP = 6
+CHANNELS_6GHz = ['6g{}'.format(4 * x + 1) for x in range(59)]
+BAND_TO_CHANNEL_MAP = {
+    '2.4GHz': list(range(1, 14)),
+    'UNII-1': [36, 40, 44, 48],
+    'UNII-2':
+    [52, 56, 60, 64, 100, 104, 108, 112, 116, 120, 124, 128, 132, 140],
+    'UNII-3': [149, 153, 157, 161, 165],
+    '6GHz': CHANNELS_6GHz
+}
+CHANNEL_TO_BAND_MAP = {
+    channel: band
+    for band, channels in BAND_TO_CHANNEL_MAP.items() for channel in channels
+}
+
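The inverted map gives direct channel-to-band lookups; keys are ints for 2.4/5 GHz channels and '6gN' strings for 6 GHz channels. For example:

    assert CHANNEL_TO_BAND_MAP[6] == '2.4GHz'
    assert CHANNEL_TO_BAND_MAP[149] == 'UNII-3'
    assert CHANNEL_TO_BAND_MAP['6g37'] == '6GHz'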
+
+# Decorators
+def nonblocking(f):
+    """Creates a decorator transforming function calls to non-blocking"""
+    def wrap(*args, **kwargs):
+        executor = ThreadPoolExecutor(max_workers=1)
+        thread_future = executor.submit(f, *args, **kwargs)
+        # Ensure resources are freed up when the executor returns or raises
+        executor.shutdown(wait=False)
+        return thread_future
+
+    return wrap
+
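A decorated call returns a concurrent.futures.Future immediately; callers only block when they ask for the result. A usage sketch with a hypothetical helper:

    @nonblocking
    def slow_add(a, b):
        # Stand-in for a long-running measurement.
        time.sleep(1)
        return a + b

    future = slow_add(2, 3)   # returns a Future without blocking
    print(future.result())    # blocks until the worker finishes -> 5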
+
+def detect_wifi_platform(dut):
+    if hasattr(dut, 'wifi_platform'):
+        return dut.wifi_platform
+    qcom_check = len(dut.get_file_names('/vendor/firmware/wlan/qca_cld/'))
+    if qcom_check:
+        dut.wifi_platform = 'qcom'
+    else:
+        dut.wifi_platform = 'brcm'
+    return dut.wifi_platform
+
+
+def detect_wifi_decorator(f):
+    def wrap(*args, **kwargs):
+        if 'dut' in kwargs:
+            dut = kwargs['dut']
+        else:
+            dut = next(arg for arg in args if type(arg) == AndroidDevice)
+        dut_package = 'acts_contrib.test_utils.wifi.wifi_performance_test_utils.{}_utils'.format(
+            detect_wifi_platform(dut))
+        dut_package = importlib.import_module(dut_package)
+        f_decorated = getattr(dut_package, f.__name__, lambda: None)
+        return (f_decorated(*args, **kwargs))
+
+    return wrap
+
+
+# JSON serializer
+def serialize_dict(input_dict):
+    """Function to serialize dicts to enable JSON output"""
+    output_dict = collections.OrderedDict()
+    for key, value in input_dict.items():
+        output_dict[_serialize_value(key)] = _serialize_value(value)
+    return output_dict
+
+
+def _serialize_value(value):
+    """Function to recursively serialize dict entries to enable JSON output"""
+    if isinstance(value, tuple):
+        return str(value)
+    if isinstance(value, numpy.int64):
+        return int(value)
+    if isinstance(value, numpy.float64):
+        return float(value)
+    if isinstance(value, list):
+        return [_serialize_value(x) for x in value]
+    if isinstance(value, numpy.ndarray):
+        return [_serialize_value(x) for x in value]
+    elif isinstance(value, dict):
+        return serialize_dict(value)
+    elif type(value) in (float, int, bool, str):
+        return value
+    else:
+        return "Non-serializable object"
+
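A small illustration of the serializer with the numpy types it is meant to handle (values are arbitrary):

    # numpy scalars/arrays and tuples are converted to JSON-friendly types.
    sample = {'rssi_mean': numpy.float64(-55.2),
              'count': numpy.int64(4),
              'samples': numpy.array([-54, -56]),
              'mcs': (2, 9)}
    serialize_dict(sample)
    # -> OrderedDict([('rssi_mean', -55.2), ('count', 4),
    #                 ('samples', [-54, -56]), ('mcs', '(2, 9)')])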
+
+def extract_sub_dict(full_dict, fields):
+    sub_dict = collections.OrderedDict(
+        (field, full_dict[field]) for field in fields)
+    return sub_dict
+
+
+# Miscellaneous Wifi Utilities
+def check_skip_conditions(testcase_params,
+                          dut,
+                          access_point,
+                          ota_chamber=None):
+    """Checks if test should be skipped."""
+    # Check battery level before test
+    if not health_check(dut, 10):
+        asserts.skip('DUT battery level too low.')
+    if not access_point.band_lookup_by_channel(testcase_params['channel']):
+        asserts.skip('AP does not support requested channel.')
+    if ota_chamber and CHANNEL_TO_BAND_MAP[
+            testcase_params['channel']] not in ota_chamber.SUPPORTED_BANDS:
+        asserts.skip('OTA chamber does not support requested channel.')
+    # Check if 6GHz is supported by checking capabilities in the US.
+    if not dut.droid.wifiCheckState():
+        wutils.wifi_toggle_state(dut, True)
+    iw_list = dut.adb.shell('iw list')
+    supports_6ghz = '6135 MHz' in iw_list
+    supports_160mhz = 'Supported Channel Width: 160 MHz' in iw_list
+    if testcase_params.get('bandwidth', 20) == 160 and not supports_160mhz:
+        asserts.skip('DUT does not support 160 MHz networks.')
+    if testcase_params.get('channel',
+                           6) in CHANNELS_6GHz and not supports_6ghz:
+        asserts.skip('DUT does not support 6 GHz band.')
+
+
+def validate_network(dut, ssid):
+    """Check that DUT has a valid internet connection through expected SSID
+
+    Args:
+        dut: android device of interest
+        ssid: expected ssid
+    """
+    try:
+        connected = wutils.validate_connection(dut, wait_time=3) is not None
+        current_network = dut.droid.wifiGetConnectionInfo()
+    except:
+        connected = False
+        current_network = None
+    if connected and current_network['SSID'] == ssid:
+        return True
+    else:
+        return False
+
+
+def get_server_address(ssh_connection, dut_ip, subnet_mask):
+    """Get server address on a specific subnet,
+
+    This function retrieves the LAN or WAN IP of a remote machine used in
+    testing. If subnet_mask is set to 'public' it returns a machines global ip,
+    else it returns the ip belonging to the dut local network given the dut's
+    ip and subnet mask.
+
+    Args:
+        ssh_connection: object representing server for which we want an ip
+        dut_ip: string in ip address format, i.e., xxx.xxx.xxx.xxx
+        subnet_mask: string representing subnet mask (public for global ip)
+    """
+    ifconfig_out = ssh_connection.run('ifconfig').stdout
+    ip_list = re.findall('inet (?:addr:)?(\d+.\d+.\d+.\d+)', ifconfig_out)
+    ip_list = [ipaddress.ip_address(ip) for ip in ip_list]
+
+    if subnet_mask == 'public':
+        for ip in ip_list:
+            # is_global is not used to allow for CGNAT ips in 100.x.y.z range
+            if not ip.is_private:
+                return str(ip)
+    else:
+        dut_network = ipaddress.ip_network('{}/{}'.format(dut_ip, subnet_mask),
+                                           strict=False)
+        for ip in ip_list:
+            if ip in dut_network:
+                return str(ip)
+    logging.error('No IP address found in requested subnet')
+
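For instance (illustrative addresses), with a DUT at 192.168.1.54 and a 255.255.255.0 mask, the function returns whichever server interface address falls inside 192.168.1.0/24; the membership test it performs reduces to:

    # Subnet-membership check performed by get_server_address.
    dut_network = ipaddress.ip_network('192.168.1.54/255.255.255.0', strict=False)
    print(ipaddress.ip_address('192.168.1.1') in dut_network)   # True
    print(ipaddress.ip_address('10.0.0.5') in dut_network)      # False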
+
+# Ping utilities
+def get_ping_stats(src_device, dest_address, ping_duration, ping_interval,
+                   ping_size):
+    """Run ping to or from the DUT.
+
+    The function either pings the DUT or pings a remote IP address from the
+    DUT.
+
+    Args:
+        src_device: object representing device to ping from
+        dest_address: ip address to ping
+        ping_duration: timeout to set on the ping process (in seconds)
+        ping_interval: time between pings (in seconds)
+        ping_size: size of ping packet payload
+    Returns:
+        ping_result: dict containing ping results and other meta data
+    """
+    ping_count = int(ping_duration / ping_interval)
+    ping_deadline = int(ping_count * ping_interval) + 1
+    ping_cmd_linux = 'ping -c {} -w {} -i {} -s {} -D'.format(
+        ping_count,
+        ping_deadline,
+        ping_interval,
+        ping_size,
+    )
+
+    ping_cmd_macos = 'ping -c {} -t {} -i {} -s {}'.format(
+        ping_count,
+        ping_deadline,
+        ping_interval,
+        ping_size,
+    )
+
+    if isinstance(src_device, AndroidDevice):
+        ping_cmd = '{} {}'.format(ping_cmd_linux, dest_address)
+        ping_output = src_device.adb.shell(ping_cmd,
+                                           timeout=ping_deadline + SHORT_SLEEP,
+                                           ignore_status=True)
+    elif isinstance(src_device, ssh.connection.SshConnection):
+        platform = src_device.run('uname').stdout
+        if 'linux' in platform.lower():
+            ping_cmd = 'sudo {} {}'.format(ping_cmd_linux, dest_address)
+        elif 'darwin' in platform.lower():
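+            # macOS ping does not support the Linux -D timestamp option, so
+            # prepend a gdate timestamp to each output line instead.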
+            ping_cmd = "sudo {} {}| while IFS= read -r line; do printf '[%s] %s\n' \"$(gdate '+%s.%N')\" \"$line\"; done".format(
+                ping_cmd_macos, dest_address)
+        ping_output = src_device.run(ping_cmd,
+                                     timeout=ping_deadline + SHORT_SLEEP,
+                                     ignore_status=True).stdout
+    else:
+        raise TypeError('Unable to ping using src_device of type %s.' %
+                        type(src_device))
+    return ping_utils.PingResult(ping_output.splitlines())
+
+
+@nonblocking
+def get_ping_stats_nb(src_device, dest_address, ping_duration, ping_interval,
+                      ping_size):
+    return get_ping_stats(src_device, dest_address, ping_duration,
+                          ping_interval, ping_size)
+
+
+# Iperf utilities
+@nonblocking
+def start_iperf_client_nb(iperf_client, iperf_server_address, iperf_args, tag,
+                          timeout):
+    return iperf_client.start(iperf_server_address, iperf_args, tag, timeout)
+
+
+def get_iperf_arg_string(duration,
+                         reverse_direction,
+                         interval=1,
+                         traffic_type='TCP',
+                         socket_size=None,
+                         num_processes=1,
+                         udp_throughput='1000M',
+                         ipv6=False):
+    """Function to format iperf client arguments.
+
+    This function takes in iperf client parameters and returns a properly
+    formatted iperf arg string to be used in throughput tests.
+
+    Args:
+        duration: iperf duration in seconds
+        reverse_direction: boolean controlling the -R flag for iperf clients
+        interval: iperf print interval
+        traffic_type: string specifying TCP or UDP traffic
+        socket_size: string specifying TCP window or socket buffer, e.g., 2M
+        num_processes: int specifying number of iperf processes
+        udp_throughput: string specifying TX throughput in UDP tests, e.g. 100M
+        ipv6: boolean controlling the use of IPv6
+    Returns:
+        iperf_args: string of formatted iperf args
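+
+    Example (illustrative, based on the formatting logic below):
+        get_iperf_arg_string(duration=30, reverse_direction=True,
+                             socket_size='2M')
+        returns '-i 1 -t 30 -J -P 1 -w 2M  -R'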
+    """
+    iperf_args = '-i {} -t {} -J '.format(interval, duration)
+    if ipv6:
+        iperf_args = iperf_args + '-6 '
+    if traffic_type.upper() == 'UDP':
+        iperf_args = iperf_args + '-u -b {} -l 1470 -P {} '.format(
+            udp_throughput, num_processes)
+    elif traffic_type.upper() == 'TCP':
+        iperf_args = iperf_args + '-P {} '.format(num_processes)
+    if socket_size:
+        iperf_args = iperf_args + '-w {} '.format(socket_size)
+    if reverse_direction:
+        iperf_args = iperf_args + ' -R'
+    return iperf_args
+
+
+# Attenuator Utilities
+def atten_by_label(atten_list, path_label, atten_level):
+    """Attenuate signals according to their path label.
+
+    Args:
+        atten_list: list of attenuators to iterate over
+        path_label: path label on which to set desired attenuation
+        atten_level: attenuation desired on path
+    """
+    for atten in atten_list:
+        if path_label in atten.path:
+            atten.set_atten(atten_level, retry=True)
+
+
+def get_atten_for_target_rssi(target_rssi, attenuators, dut, ping_server):
+    """Function to estimate attenuation to hit a target RSSI.
+
+    This function estimates a constant attenuation setting on all attenuation
+    ports to hit a target RSSI. The estimate is not meant to be exact or
+    guaranteed.
+
+    Args:
+        target_rssi: rssi of interest
+        attenuators: list of attenuator ports
+        dut: android device object assumed connected to a wifi network.
+        ping_server: ssh connection object to ping server
+    Returns:
+        target_atten: attenuation setting to achieve target_rssi
+    """
+    logging.info('Searching attenuation for RSSI = {}dB'.format(target_rssi))
+    # Set attenuator to 0 dB
+    for atten in attenuators:
+        atten.set_atten(0, strict=False, retry=True)
+    dut_ip = dut.droid.connectivityGetIPv4Addresses('wlan0')[0]
+    # Start background ping traffic and measure the starting RSSI
+    ping_future = get_ping_stats_nb(src_device=ping_server,
+                                    dest_address=dut_ip,
+                                    ping_duration=1.5,
+                                    ping_interval=0.02,
+                                    ping_size=64)
+    current_rssi = get_connected_rssi(dut,
+                                      num_measurements=4,
+                                      polling_frequency=0.25,
+                                      first_measurement_delay=0.5,
+                                      disconnect_warning=1,
+                                      ignore_samples=1)
+    current_rssi = current_rssi['signal_poll_rssi']['mean']
+    ping_future.result()
+    target_atten = 0
+    logging.debug('RSSI @ {0:.2f}dB attenuation = {1:.2f}'.format(
+        target_atten, current_rssi))
+    within_range = 0
+    for idx in range(20):
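+        # Adjust attenuation by the RSSI error, capped at +/-20 dB, and
+        # quantize the result to 0.25 dB steps.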
+        atten_delta = max(min(current_rssi - target_rssi, 20), -20)
+        target_atten = int((target_atten + atten_delta) * 4) / 4
+        if target_atten < 0:
+            return 0
+        if target_atten > attenuators[0].get_max_atten():
+            return attenuators[0].get_max_atten()
+        for atten in attenuators:
+            atten.set_atten(target_atten, strict=False, retry=True)
+        ping_future = get_ping_stats_nb(src_device=ping_server,
+                                        dest_address=dut_ip,
+                                        ping_duration=1.5,
+                                        ping_interval=0.02,
+                                        ping_size=64)
+        current_rssi = get_connected_rssi(dut,
+                                          num_measurements=4,
+                                          polling_frequency=0.25,
+                                          first_measurement_delay=0.5,
+                                          disconnect_warning=1,
+                                          ignore_samples=1)
+        current_rssi = current_rssi['signal_poll_rssi']['mean']
+        ping_future.result()
+        logging.info('RSSI @ {0:.2f}dB attenuation = {1:.2f}'.format(
+            target_atten, current_rssi))
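+        # Require two consecutive measurements within 1 dB of the target
+        # before declaring convergence.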
+        if abs(current_rssi - target_rssi) < 1:
+            if within_range:
+                logging.info(
+                    'Reached RSSI: {0:.2f}. Target RSSI: {1:.2f}. '
+                    'Attenuation: {2:.2f}, Iterations = {3}'.format(
+                        current_rssi, target_rssi, target_atten, idx))
+                return target_atten
+            else:
+                within_range = True
+        else:
+            within_range = False
+    return target_atten
+
+
+def get_current_atten_dut_chain_map(attenuators,
+                                    dut,
+                                    ping_server,
+                                    ping_from_dut=False):
+    """Function to detect mapping between attenuator ports and DUT chains.
+
+    This function detects the mapping between attenuator ports and DUT chains
+    in cases where DUT chains are connected to only one attenuator port. The
+    function assumes the DUT is already connected to a wifi network. The
+    function starts by measuring per chain RSSI at 0 attenuation, then
+    attenuates one port at a time looking for the chain that reports a lower
+    RSSI.
+
+    Args:
+        attenuators: list of attenuator ports
+        dut: android device object assumed connected to a wifi network.
+        ping_server: ssh connection object to ping server
+        ping_from_dut: boolean controlling whether to ping from or to dut
+    Returns:
+        chain_map: list of dut chains, one entry per attenuator port
+    """
+    # Set attenuator to 0 dB
+    for atten in attenuators:
+        atten.set_atten(0, strict=False, retry=True)
+    # Start ping traffic
+    dut_ip = dut.droid.connectivityGetIPv4Addresses('wlan0')[0]
+    if ping_from_dut:
+        ping_future = get_ping_stats_nb(dut, ping_server._settings.hostname,
+                                        11, 0.02, 64)
+    else:
+        ping_future = get_ping_stats_nb(ping_server, dut_ip, 11, 0.02, 64)
+    # Measure starting RSSI
+    base_rssi = get_connected_rssi(dut, 4, 0.25, 1)
+    chain0_base_rssi = base_rssi['chain_0_rssi']['mean']
+    chain1_base_rssi = base_rssi['chain_1_rssi']['mean']
+    if chain0_base_rssi < -70 or chain1_base_rssi < -70:
+        logging.warning('RSSI might be too low to get reliable chain map.')
+    # Compile chain map by attenuating one path at a time and seeing which
+    # chain's RSSI degrades
+    chain_map = []
+    for test_atten in attenuators:
+        # Set one attenuator to 30 dB down
+        test_atten.set_atten(30, strict=False, retry=True)
+        # Get new RSSI
+        test_rssi = get_connected_rssi(dut, 4, 0.25, 1)
+        # Assign attenuator to path that has lower RSSI
+        if chain0_base_rssi > -70 and chain0_base_rssi - test_rssi[
+                'chain_0_rssi']['mean'] > 10:
+            chain_map.append('DUT-Chain-0')
+        elif chain1_base_rssi > -70 and chain1_base_rssi - test_rssi[
+                'chain_1_rssi']['mean'] > 10:
+            chain_map.append('DUT-Chain-1')
+        else:
+            chain_map.append(None)
+        # Reset attenuator to 0
+        test_atten.set_atten(0, strict=False, retry=True)
+    ping_future.result()
+    logging.debug('Chain Map: {}'.format(chain_map))
+    return chain_map
+
+
+def get_full_rf_connection_map(attenuators,
+                               dut,
+                               ping_server,
+                               networks,
+                               ping_from_dut=False):
+    """Function to detect per-network connections between attenuator and DUT.
+
+    This function detects the mapping between attenuator ports and DUT chains
+    on all networks in its arguments. The function connects the DUT to each
+    network then calls get_current_atten_dut_chain_map to get the connection
+    map on the current network. The function outputs the results in two formats
+    to enable easy access when users are interested in indexing by network or
+    attenuator port.
+
+    Args:
+        attenuators: list of attenuator ports
+        dut: android device object assumed connected to a wifi network.
+        ping_server: ssh connection object to ping server
+        networks: dict of network IDs and configs
+        ping_from_dut: boolean controlling whether to ping from or to dut
+    Returns:
+        rf_map_by_network: dict of RF connections indexed by network.
+        rf_map_by_atten: list of RF connections indexed by attenuator
+    """
+    for atten in attenuators:
+        atten.set_atten(0, strict=False, retry=True)
+
+    rf_map_by_network = collections.OrderedDict()
+    rf_map_by_atten = [[] for atten in attenuators]
+    for net_id, net_config in networks.items():
+        wutils.reset_wifi(dut)
+        wutils.wifi_connect(dut,
+                            net_config,
+                            num_of_tries=1,
+                            assert_on_fail=False,
+                            check_connectivity=False)
+        rf_map_by_network[net_id] = get_current_atten_dut_chain_map(
+            attenuators, dut, ping_server, ping_from_dut)
+        for idx, chain in enumerate(rf_map_by_network[net_id]):
+            if chain:
+                rf_map_by_atten[idx].append({
+                    'network': net_id,
+                    'dut_chain': chain
+                })
+    logging.debug('RF Map (by Network): {}'.format(rf_map_by_network))
+    logging.debug('RF Map (by Atten): {}'.format(rf_map_by_atten))
+
+    return rf_map_by_network, rf_map_by_atten
+
+
+# Generic device utils
+def get_dut_temperature(dut):
+    """Function to get dut temperature.
+
+    The function fetches and returns the reading from the temperature sensor
+    used for skin temperature and thermal throttling.
+
+    Args:
+        dut: AndroidDevice of interest
+    Returns:
+        temperature: device temperature. 0 if temperature could not be read
+    """
+    candidate_zones = [
+        '/sys/devices/virtual/thermal/tz-by-name/skin-therm/temp',
+        '/sys/devices/virtual/thermal/tz-by-name/sdm-therm-monitor/temp',
+        '/sys/devices/virtual/thermal/tz-by-name/sdm-therm-adc/temp',
+        '/sys/devices/virtual/thermal/tz-by-name/back_therm/temp',
+        '/dev/thermal/tz-by-name/quiet_therm/temp'
+    ]
+    for zone in candidate_zones:
+        try:
+            temperature = int(dut.adb.shell('cat {}'.format(zone)))
+            break
+        except:
+            temperature = 0
+    if temperature == 0:
+        logging.debug('Could not check DUT temperature.')
+    elif temperature > 100:
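+        # Values above 100 are assumed to be millidegrees Celsius reported by
+        # the thermal zone and are converted to degrees.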
+        temperature = temperature / 1000
+    return temperature
+
+
+def wait_for_dut_cooldown(dut, target_temp=50, timeout=300):
+    """Function to wait for a DUT to cool down.
+
+    Args:
+        dut: AndroidDevice of interest
+        target_temp: target cooldown temperature
+        timeout: max time to wait for cooldown
+    """
+    start_time = time.time()
+    while time.time() - start_time < timeout:
+        temperature = get_dut_temperature(dut)
+        if temperature < target_temp:
+            break
+        time.sleep(SHORT_SLEEP)
+    elapsed_time = time.time() - start_time
+    logging.debug('DUT Final Temperature: {}C. Cooldown duration: {}'.format(
+        temperature, elapsed_time))
+
+
+def health_check(dut, batt_thresh=5, temp_threshold=53, cooldown=1):
+    """Function to check health status of a DUT.
+
+    The function checks both battery levels and temperature to avoid DUT
+    powering off during the test.
+
+    Args:
+        dut: AndroidDevice of interest
+        batt_thresh: battery level threshold
+        temp_threshold: temperature threshold
+        cooldown: flag to wait for DUT to cool down when overheating
+    Returns:
+        health_check: boolean confirming device is healthy
+    """
+    health_check = True
+    battery_level = utils.get_battery_level(dut)
+    if battery_level < batt_thresh:
+        logging.warning('Battery level low ({}%)'.format(battery_level))
+        health_check = False
+    else:
+        logging.debug('Battery level = {}%'.format(battery_level))
+
+    temperature = get_dut_temperature(dut)
+    if temperature > temp_threshold:
+        if cooldown:
+            logging.warning(
+                'Waiting for DUT to cooldown. ({} C)'.format(temperature))
+            wait_for_dut_cooldown(dut, target_temp=temp_threshold - 5)
+        else:
+            logging.warning('DUT Overheating ({} C)'.format(temperature))
+            health_check = False
+    else:
+        logging.debug('DUT Temperature = {} C'.format(temperature))
+    return health_check
+
+
+# Wifi Device Utils
+def empty_rssi_result():
+    return collections.OrderedDict([('data', []), ('mean', float('nan')),
+                                    ('stdev', float('nan'))])
+
+
+@nonblocking
+def get_connected_rssi_nb(dut,
+                          num_measurements=1,
+                          polling_frequency=SHORT_SLEEP,
+                          first_measurement_delay=0,
+                          disconnect_warning=True,
+                          ignore_samples=0,
+                          interface='wlan0'):
+    return get_connected_rssi(dut, num_measurements, polling_frequency,
+                              first_measurement_delay, disconnect_warning,
+                              ignore_samples, interface)
+
+
+@detect_wifi_decorator
+def get_connected_rssi(dut,
+                       num_measurements=1,
+                       polling_frequency=SHORT_SLEEP,
+                       first_measurement_delay=0,
+                       disconnect_warning=True,
+                       ignore_samples=0,
+                       interface='wlan0'):
+    """Gets all RSSI values reported for the connected access point/BSSID.
+
+    Args:
+        dut: android device object from which to get RSSI
+        num_measurements: number of scans done, and RSSIs collected
+        polling_frequency: time to wait between RSSI measurements
+        first_measurement_delay: time to wait before the first measurement
+        disconnect_warning: boolean controlling disconnection logging messages
+        ignore_samples: number of leading samples to ignore
+        interface: wifi interface from which to query RSSI
+    Returns:
+        connected_rssi: dict containing the measurements results for
+        all reported RSSI values (signal_poll, per chain, etc.) and their
+        statistics
+    """
+    pass
+
+
+@nonblocking
+def get_scan_rssi_nb(dut, tracked_bssids, num_measurements=1):
+    return get_scan_rssi(dut, tracked_bssids, num_measurements)
+
+
+@detect_wifi_decorator
+def get_scan_rssi(dut, tracked_bssids, num_measurements=1):
+    """Gets scan RSSI for specified BSSIDs.
+
+    Args:
+        dut: android device object from which to get RSSI
+        tracked_bssids: array of BSSIDs to gather RSSI data for
+        num_measurements: number of scans done, and RSSIs collected
+    Returns:
+        scan_rssi: dict containing the measurement results as well as the
+        statistics of the scan RSSI for all BSSIDs in tracked_bssids
+    """
+    pass
+
+
+@detect_wifi_decorator
+def get_sw_signature(dut):
+    """Function that checks the signature for wifi firmware and config files.
+
+    Returns:
+        bdf_signature: signature consisting of last three digits of bdf cksums
+        fw_signature: floating point firmware version, i.e., major.minor
+    """
+    pass
+
+
+@detect_wifi_decorator
+def get_country_code(dut):
+    """Function that returns the current wifi country code."""
+    pass
+
+
+@detect_wifi_decorator
+def push_config(dut, config_file):
+    """Function to push Wifi BDF files
+
+    This function checks for existing wifi bdf files and overwrites them all,
+    for simplicity, with the bdf file provided in the arguments. The dut is
+    rebooted for the bdf file to take effect.
+
+    Args:
+        dut: dut to push bdf file to
+        config_file: path to bdf_file to push
+    """
+    pass
+
+
+@detect_wifi_decorator
+def start_wifi_logging(dut):
+    """Function to start collecting wifi-related logs"""
+    pass
+
+
+@detect_wifi_decorator
+def stop_wifi_logging(dut):
+    """Function to start collecting wifi-related logs"""
+    pass
+
+
+@detect_wifi_decorator
+def push_firmware(dut, firmware_files):
+    """Function to push Wifi firmware files
+
+    Args:
+        dut: dut to push firmware files to
+        firmware_files: paths to firmware files to push, e.g., wlanmdsp.mbn
+    """
+    pass
+
+
+@detect_wifi_decorator
+def disable_beamforming(dut):
+    """Function to disable beamforming."""
+    pass
+
+
+@detect_wifi_decorator
+def set_nss_capability(dut, nss):
+    """Function to set number of spatial streams supported."""
+    pass
+
+
+@detect_wifi_decorator
+def set_chain_mask(dut, chain_mask):
+    """Function to set DUT chain mask.
+
+    Args:
+        dut: android device
+        chain_mask: desired chain mask in [0, 1, '2x2']
+    """
+    pass
+
+
+# Link layer stats utilities
+class LinkLayerStats():
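+    """Factory class that returns the platform-specific LinkLayerStats.
+
+    Instantiating this class returns the qcom or brcm LinkLayerStats
+    implementation depending on the detected wifi platform.
+    """
+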
+    def __new__(cls, dut, llstats_enabled=True):
+        if detect_wifi_platform(dut) == 'qcom':
+            return qcom_utils.LinkLayerStats(dut, llstats_enabled)
+        else:
+            return brcm_utils.LinkLayerStats(dut, llstats_enabled)
diff --git a/acts_tests/acts_contrib/test_utils/wifi/wifi_performance_test_utils/bokeh_figure.py b/acts_tests/acts_contrib/test_utils/wifi/wifi_performance_test_utils/bokeh_figure.py
new file mode 100644
index 0000000..5a8433e
--- /dev/null
+++ b/acts_tests/acts_contrib/test_utils/wifi/wifi_performance_test_utils/bokeh_figure.py
@@ -0,0 +1,361 @@
+#!/usr/bin/env python3.4
+#
+#   Copyright 2021 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the 'License');
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an 'AS IS' BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+import bokeh, bokeh.plotting, bokeh.io
+import collections
+import itertools
+import json
+import math
+
+
+# Plotting Utilities
+class BokehFigure():
+    """Class enabling  simplified Bokeh plotting."""
+
+    COLORS = [
+        'black',
+        'blue',
+        'blueviolet',
+        'brown',
+        'burlywood',
+        'cadetblue',
+        'cornflowerblue',
+        'crimson',
+        'cyan',
+        'darkblue',
+        'darkgreen',
+        'darkmagenta',
+        'darkorange',
+        'darkred',
+        'deepskyblue',
+        'goldenrod',
+        'green',
+        'grey',
+        'indigo',
+        'navy',
+        'olive',
+        'orange',
+        'red',
+        'salmon',
+        'teal',
+        'yellow',
+    ]
+    MARKERS = [
+        'asterisk', 'circle', 'circle_cross', 'circle_x', 'cross', 'diamond',
+        'diamond_cross', 'hex', 'inverted_triangle', 'square', 'square_x',
+        'square_cross', 'triangle', 'x'
+    ]
+
+    TOOLS = ('box_zoom,box_select,pan,crosshair,redo,undo,reset,hover,save')
+
+    def __init__(self,
+                 title=None,
+                 x_label=None,
+                 primary_y_label=None,
+                 secondary_y_label=None,
+                 height=700,
+                 width=1100,
+                 title_size='15pt',
+                 axis_label_size='12pt',
+                 legend_label_size='12pt',
+                 axis_tick_label_size='12pt',
+                 x_axis_type='auto',
+                 sizing_mode='scale_both',
+                 json_file=None):
+        if json_file:
+            self.load_from_json(json_file)
+        else:
+            self.figure_data = []
+            self.fig_property = {
+                'title': title,
+                'x_label': x_label,
+                'primary_y_label': primary_y_label,
+                'secondary_y_label': secondary_y_label,
+                'num_lines': 0,
+                'height': height,
+                'width': width,
+                'title_size': title_size,
+                'axis_label_size': axis_label_size,
+                'legend_label_size': legend_label_size,
+                'axis_tick_label_size': axis_tick_label_size,
+                'x_axis_type': x_axis_type,
+                'sizing_mode': sizing_mode
+            }
+
+    def init_plot(self):
+        self.plot = bokeh.plotting.figure(
+            sizing_mode=self.fig_property['sizing_mode'],
+            plot_width=self.fig_property['width'],
+            plot_height=self.fig_property['height'],
+            title=self.fig_property['title'],
+            tools=self.TOOLS,
+            x_axis_type=self.fig_property['x_axis_type'],
+            output_backend='webgl')
+        tooltips = [
+            ('index', '$index'),
+            ('(x,y)', '($x, $y)'),
+        ]
+        hover_set = []
+        for line in self.figure_data:
+            hover_set.extend(line['hover_text'].keys())
+        hover_set = set(hover_set)
+        for item in hover_set:
+            tooltips.append((item, '@{}'.format(item)))
+        self.plot.hover.tooltips = tooltips
+        self.plot.add_tools(
+            bokeh.models.tools.WheelZoomTool(dimensions='width'))
+        self.plot.add_tools(
+            bokeh.models.tools.WheelZoomTool(dimensions='height'))
+
+    def _filter_line(self, x_data, y_data, hover_text=None):
+        """Function to remove NaN points from bokeh plots."""
+        x_data_filtered = []
+        y_data_filtered = []
+        hover_text_filtered = {}
+        for idx, xy in enumerate(
+                itertools.zip_longest(x_data, y_data, fillvalue=float('nan'))):
+            if not math.isnan(xy[1]):
+                x_data_filtered.append(xy[0])
+                y_data_filtered.append(xy[1])
+                if hover_text:
+                    for key, value in hover_text.items():
+                        hover_text_filtered.setdefault(key, [])
+                        hover_text_filtered[key].append(
+                            value[idx] if len(value) > idx else '')
+        return x_data_filtered, y_data_filtered, hover_text_filtered
+
+    def add_line(self,
+                 x_data,
+                 y_data,
+                 legend,
+                 hover_text=None,
+                 color=None,
+                 width=3,
+                 style='solid',
+                 marker=None,
+                 marker_size=10,
+                 shaded_region=None,
+                 y_axis='default'):
+        """Function to add line to existing BokehFigure.
+
+        Args:
+            x_data: list containing x-axis values for line
+            y_data: list containing y_axis values for line
+            legend: string containing line title
+            hover_text: text to display when hovering over lines
+            color: string describing line color
+            width: integer line width
+            style: string describing line style, e.g, solid or dashed
+            marker: string specifying line marker, e.g., cross
+            shaded_region: data describing shaded region to plot
+            y_axis: identifier for y-axis to plot line against
+        """
+        if y_axis not in ['default', 'secondary']:
+            raise ValueError('y_axis must be default or secondary')
+        if color is None:
+            color = self.COLORS[self.fig_property['num_lines'] %
+                                len(self.COLORS)]
+        if style == 'dashed':
+            style = [5, 5]
+        if isinstance(hover_text, list):
+            hover_text = {'info': hover_text}
+        x_data_filter, y_data_filter, hover_text_filter = self._filter_line(
+            x_data, y_data, hover_text)
+        self.figure_data.append({
+            'x_data': x_data_filter,
+            'y_data': y_data_filter,
+            'legend': legend,
+            'hover_text': hover_text_filter,
+            'color': color,
+            'width': width,
+            'style': style,
+            'marker': marker,
+            'marker_size': marker_size,
+            'shaded_region': shaded_region,
+            'y_axis': y_axis
+        })
+        self.fig_property['num_lines'] += 1
+
+    def add_scatter(self,
+                    x_data,
+                    y_data,
+                    legend,
+                    hover_text=None,
+                    color=None,
+                    marker=None,
+                    marker_size=10,
+                    y_axis='default'):
+        """Function to add line to existing BokehFigure.
+
+        Args:
+            x_data: list containing x-axis values for line
+            y_data: list containing y_axis values for line
+            legend: string containing line title
+            hover_text: text to display when hovering over lines
+            color: string describing line color
+            marker: string specifying marker, e.g., cross
+            y_axis: identifier for y-axis to plot line against
+        """
+        if y_axis not in ['default', 'secondary']:
+            raise ValueError('y_axis must be default or secondary')
+        if color is None:
+            color = self.COLORS[self.fig_property['num_lines'] %
+                                len(self.COLORS)]
+        if marker is None:
+            marker = self.MARKERS[self.fig_property['num_lines'] %
+                                  len(self.MARKERS)]
+        self.figure_data.append({
+            'x_data': x_data,
+            'y_data': y_data,
+            'legend': legend,
+            'hover_text': hover_text,
+            'color': color,
+            'width': 0,
+            'style': 'solid',
+            'marker': marker,
+            'marker_size': marker_size,
+            'shaded_region': None,
+            'y_axis': y_axis
+        })
+        self.fig_property['num_lines'] += 1
+
+    def generate_figure(self, output_file=None, save_json=True):
+        """Function to generate and save BokehFigure.
+
+        Args:
+            output_file: string specifying output file path
+            save_json: flag controlling json outputs
+        """
+        self.init_plot()
+        two_axes = False
+        for line in self.figure_data:
+            data_dict = {'x': line['x_data'], 'y': line['y_data']}
+            for key, value in line['hover_text'].items():
+                data_dict[key] = value
+            source = bokeh.models.ColumnDataSource(data=data_dict)
+            if line['width'] > 0:
+                self.plot.line(x='x',
+                               y='y',
+                               legend_label=line['legend'],
+                               line_width=line['width'],
+                               color=line['color'],
+                               line_dash=line['style'],
+                               name=line['y_axis'],
+                               y_range_name=line['y_axis'],
+                               source=source)
+            if line['shaded_region']:
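+                # Build a closed polygon: x values forward then reversed, with
+                # the lower limit followed by the reversed upper limit.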
+                band_x = line['shaded_region']['x_vector']
+                band_x.extend(line['shaded_region']['x_vector'][::-1])
+                band_y = line['shaded_region']['lower_limit']
+                band_y.extend(line['shaded_region']['upper_limit'][::-1])
+                self.plot.patch(band_x,
+                                band_y,
+                                color='#7570B3',
+                                line_alpha=0.1,
+                                fill_alpha=0.1)
+            if line['marker'] in self.MARKERS:
+                marker_func = getattr(self.plot, line['marker'])
+                marker_func(x='x',
+                            y='y',
+                            size=line['marker_size'],
+                            legend_label=line['legend'],
+                            line_color=line['color'],
+                            fill_color=line['color'],
+                            name=line['y_axis'],
+                            y_range_name=line['y_axis'],
+                            source=source)
+            if line['y_axis'] == 'secondary':
+                two_axes = True
+
+        #x-axis formatting
+        self.plot.xaxis.axis_label = self.fig_property['x_label']
+        self.plot.x_range.range_padding = 0
+        self.plot.xaxis[0].axis_label_text_font_size = self.fig_property[
+            'axis_label_size']
+        self.plot.xaxis.major_label_text_font_size = self.fig_property[
+            'axis_tick_label_size']
+        #y-axis formatting
+        self.plot.yaxis[0].axis_label = self.fig_property['primary_y_label']
+        self.plot.yaxis[0].axis_label_text_font_size = self.fig_property[
+            'axis_label_size']
+        self.plot.yaxis.major_label_text_font_size = self.fig_property[
+            'axis_tick_label_size']
+        self.plot.y_range = bokeh.models.DataRange1d(names=['default'])
+        if two_axes and 'secondary' not in self.plot.extra_y_ranges:
+            self.plot.extra_y_ranges = {
+                'secondary': bokeh.models.DataRange1d(names=['secondary'])
+            }
+            self.plot.add_layout(
+                bokeh.models.LinearAxis(
+                    y_range_name='secondary',
+                    axis_label=self.fig_property['secondary_y_label'],
+                    axis_label_text_font_size=self.
+                    fig_property['axis_label_size']), 'right')
+        # plot formatting
+        self.plot.legend.location = 'top_right'
+        self.plot.legend.click_policy = 'hide'
+        self.plot.title.text_font_size = self.fig_property['title_size']
+        self.plot.legend.label_text_font_size = self.fig_property[
+            'legend_label_size']
+
+        if output_file is not None:
+            self.save_figure(output_file, save_json)
+        return self.plot
+
+    def load_from_json(self, file_path):
+        with open(file_path, 'r') as json_file:
+            fig_dict = json.load(json_file)
+        self.fig_property = fig_dict['fig_property']
+        self.figure_data = fig_dict['figure_data']
+
+    def _save_figure_json(self, output_file):
+        """Function to save a json format of a figure"""
+        figure_dict = collections.OrderedDict(fig_property=self.fig_property,
+                                              figure_data=self.figure_data)
+        output_file = output_file.replace('.html', '_plot_data.json')
+        with open(output_file, 'w') as outfile:
+            json.dump(figure_dict, outfile, indent=4)
+
+    def save_figure(self, output_file, save_json=True):
+        """Function to save BokehFigure.
+
+        Args:
+            output_file: string specifying output file path
+            save_json: flag controlling json outputs
+        """
+        if save_json:
+            self._save_figure_json(output_file)
+        bokeh.io.output_file(output_file)
+        bokeh.io.save(self.plot)
+
+    @staticmethod
+    def save_figures(figure_array, output_file_path, save_json=True):
+        """Function to save list of BokehFigures in one file.
+
+        Args:
+            figure_array: list of BokehFigure objects to be plotted
+            output_file_path: string specifying output file path
+            save_json: flag controlling json outputs
+        """
+        for idx, figure in enumerate(figure_array):
+            figure.generate_figure()
+            if save_json:
+                json_file_path = output_file_path.replace(
+                    '.html', '{}-plot_data.json'.format(idx))
+                figure._save_figure_json(json_file_path)
+        plot_array = [figure.plot for figure in figure_array]
+        all_plots = bokeh.layouts.column(children=plot_array,
+                                         sizing_mode='scale_width')
+        bokeh.plotting.output_file(output_file_path)
+        bokeh.plotting.save(all_plots)
diff --git a/acts_tests/acts_contrib/test_utils/wifi/wifi_performance_test_utils/brcm_utils.py b/acts_tests/acts_contrib/test_utils/wifi/wifi_performance_test_utils/brcm_utils.py
new file mode 100644
index 0000000..afa5f32
--- /dev/null
+++ b/acts_tests/acts_contrib/test_utils/wifi/wifi_performance_test_utils/brcm_utils.py
@@ -0,0 +1,578 @@
+#!/usr/bin/env python3.4
+#
+#   Copyright 2021 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the 'License');
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an 'AS IS' BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+import collections
+import hashlib
+import itertools
+import logging
+import math
+import numpy
+import re
+import statistics
+import time
+
+VERY_SHORT_SLEEP = 0.5
+SHORT_SLEEP = 1
+MED_SLEEP = 6
+DISCONNECTION_MESSAGE_BRCM = 'driver adapter not found'
+RSSI_ERROR_VAL = float('nan')
+RATE_TABLE = {
+    'HT': {
+        1: {
+            20: [7.2, 14.4, 21.7, 28.9, 43.4, 57.8, 65.0, 72.2],
+            40: [15.0, 30.0, 45.0, 60.0, 90.0, 120.0, 135.0, 150.0]
+        },
+        2: {
+            20: [
+                0, 0, 0, 0, 0, 0, 0, 0, 14.4, 28.8, 43.4, 57.8, 86.8, 115.6,
+                130, 144.4
+            ],
+            40: [0, 0, 0, 0, 0, 0, 0, 0, 30, 60, 90, 120, 180, 240, 270, 300]
+        }
+    },
+    'VHT': {
+        1: {
+            20: [
+                7.2, 14.4, 21.7, 28.9, 43.4, 57.8, 65.0, 72.2, 86.7, 96.2,
+                129.0, 143.4
+            ],
+            40: [
+                15.0, 30.0, 45.0, 60.0, 90.0, 120.0, 135.0, 150.0, 180.0,
+                200.0, 258, 286.8
+            ],
+            80: [
+                32.5, 65.0, 97.5, 130.0, 195.0, 260.0, 292.5, 325.0, 390.0,
+                433.3, 540.4, 600.4
+            ],
+            160: [
+                65.0, 130.0, 195.0, 260.0, 390.0, 520.0, 585.0, 650.0, 780.0,
+                866.7, 1080.8, 1200.8
+            ]
+        },
+        2: {
+            20: [
+                14.4, 28.8, 43.4, 57.8, 86.8, 115.6, 130, 144.4, 173.4, 192.4,
+                258, 286.8
+            ],
+            40: [30, 60, 90, 120, 180, 240, 270, 300, 360, 400, 516, 573.6],
+            80: [
+                65, 130, 195, 260, 390, 520, 585, 650, 780, 866.6, 1080.8,
+                1200.8
+            ],
+            160: [
+                130, 260, 390, 520, 780, 1040, 1170, 1300, 1560, 1733.3,
+                2161.6, 2401.6
+            ]
+        },
+    },
+    'HE': {
+        1: {
+            20: [
+                8.6, 17.2, 25.8, 34.4, 51.6, 68.8, 77.4, 86.0, 103.2, 114.7,
+                129.0, 143.4
+            ],
+            40: [
+                17.2, 34.4, 51.6, 68.8, 103.2, 137.6, 154.8, 172, 206.4, 229.4,
+                258, 286.8
+            ],
+            80: [
+                36.0, 72.1, 108.1, 144.1, 216.2, 288.2, 324.3, 360.3, 432.4,
+                480.4, 540.4, 600.4
+            ],
+            160: [
+                72, 144.2, 216.2, 288.2, 432.4, 576.4, 648.6, 720.6, 864.8,
+                960.8, 1080.8, 1200.8
+            ]
+        },
+        2: {
+            20: [
+                17.2, 34.4, 51.6, 68.8, 103.2, 137.6, 154.8, 172, 206.4, 229.4,
+                258, 286.8
+            ],
+            40: [
+                34.4, 68.8, 103.2, 137.6, 206.4, 275.2, 309.6, 344, 412.8,
+                458.8, 516, 573.6
+            ],
+            80: [
+                72, 144.2, 216.2, 288.2, 432.4, 576.4, 648.6, 720.6, 864.8,
+                960.8, 1080.8, 1200.8
+            ],
+            160: [
+                144, 288.4, 432.4, 576.4, 864.8, 1152.8, 1297.2, 1441.2,
+                1729.6, 1921.6, 2161.6, 2401.6
+            ]
+        },
+    },
+}
+
+
+# Rssi Utilities
+def empty_rssi_result():
+    return collections.OrderedDict([('data', []), ('mean', None),
+                                    ('stdev', None)])
+
+
+def get_connected_rssi(dut,
+                       num_measurements=1,
+                       polling_frequency=SHORT_SLEEP,
+                       first_measurement_delay=0,
+                       disconnect_warning=True,
+                       ignore_samples=0,
+                       interface='wlan0'):
+    # yapf: disable
+    connected_rssi = collections.OrderedDict(
+        [('time_stamp', []),
+         ('bssid', []), ('ssid', []), ('frequency', []),
+         ('signal_poll_rssi', empty_rssi_result()),
+         ('signal_poll_avg_rssi', empty_rssi_result()),
+         ('chain_0_rssi', empty_rssi_result()),
+         ('chain_1_rssi', empty_rssi_result())])
+
+    # yapf: enable
+    previous_bssid = 'disconnected'
+    t0 = time.time()
+    time.sleep(first_measurement_delay)
+    for idx in range(num_measurements):
+        measurement_start_time = time.time()
+        connected_rssi['time_stamp'].append(measurement_start_time - t0)
+        # Get signal poll RSSI
+        try:
+            status_output = dut.adb.shell(
+                'wpa_cli -i {} status'.format(interface))
+        except:
+            status_output = ''
+        match = re.search('bssid=.*', status_output)
+        if match:
+            current_bssid = match.group(0).split('=')[1]
+            connected_rssi['bssid'].append(current_bssid)
+        else:
+            current_bssid = 'disconnected'
+            connected_rssi['bssid'].append(current_bssid)
+            if disconnect_warning and previous_bssid != 'disconnected':
+                logging.warning('WIFI DISCONNECT DETECTED!')
+
+        previous_bssid = current_bssid
+        match = re.search(r'\s+ssid=.*', status_output)
+        if match:
+            ssid = match.group(0).split('=')[1]
+            connected_rssi['ssid'].append(ssid)
+        else:
+            connected_rssi['ssid'].append('disconnected')
+
+        #TODO: SEARCH MAP ; PICK CENTER CHANNEL
+        match = re.search(r'\s+freq=.*', status_output)
+        if match:
+            frequency = int(match.group(0).split('=')[1])
+            connected_rssi['frequency'].append(frequency)
+        else:
+            connected_rssi['frequency'].append(RSSI_ERROR_VAL)
+
+        if interface == 'wlan0':
+            try:
+                per_chain_rssi = dut.adb.shell('wl phy_rssi_ant')
+                chain_0_rssi = re.search(
+                    r'rssi\[0\]\s(?P<chain_0_rssi>[0-9\-]*)', per_chain_rssi)
+                if chain_0_rssi:
+                    chain_0_rssi = int(chain_0_rssi.group('chain_0_rssi'))
+                else:
+                    chain_0_rssi = -float('inf')
+                chain_1_rssi = re.search(
+                    r'rssi\[1\]\s(?P<chain_1_rssi>[0-9\-]*)', per_chain_rssi)
+                if chain_1_rssi:
+                    chain_1_rssi = int(chain_1_rssi.group('chain_1_rssi'))
+                else:
+                    chain_1_rssi = -float('inf')
+            except:
+                chain_0_rssi = RSSI_ERROR_VAL
+                chain_1_rssi = RSSI_ERROR_VAL
+            connected_rssi['chain_0_rssi']['data'].append(chain_0_rssi)
+            connected_rssi['chain_1_rssi']['data'].append(chain_1_rssi)
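+            # Combine per-chain RSSIs in the linear (mW) domain, then convert
+            # the sum back to dBm.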
+            combined_rssi = math.pow(10, chain_0_rssi / 10) + math.pow(
+                10, chain_1_rssi / 10)
+            combined_rssi = 10 * math.log10(combined_rssi)
+            connected_rssi['signal_poll_rssi']['data'].append(combined_rssi)
+            connected_rssi['signal_poll_avg_rssi']['data'].append(
+                combined_rssi)
+        else:
+            try:
+                signal_poll_output = dut.adb.shell(
+                    'wpa_cli -i {} signal_poll'.format(interface))
+            except:
+                signal_poll_output = ''
+            match = re.search('RSSI=.*', signal_poll_output)
+            if match:
+                temp_rssi = int(match.group(0).split('=')[1])
+                if temp_rssi == -9999 or temp_rssi == 0:
+                    connected_rssi['signal_poll_rssi']['data'].append(
+                        RSSI_ERROR_VAL)
+                else:
+                    connected_rssi['signal_poll_rssi']['data'].append(
+                        temp_rssi)
+            else:
+                connected_rssi['signal_poll_rssi']['data'].append(
+                    RSSI_ERROR_VAL)
+            connected_rssi['chain_0_rssi']['data'].append(RSSI_ERROR_VAL)
+            connected_rssi['chain_1_rssi']['data'].append(RSSI_ERROR_VAL)
+        measurement_elapsed_time = time.time() - measurement_start_time
+        time.sleep(max(0, polling_frequency - measurement_elapsed_time))
+
+    # Compute statistics over the collected samples
+    for key, val in connected_rssi.copy().items():
+        if 'data' not in val:
+            continue
+        filtered_rssi_values = [x for x in val['data'] if not math.isnan(x)]
+        if len(filtered_rssi_values) > ignore_samples:
+            filtered_rssi_values = filtered_rssi_values[ignore_samples:]
+        if filtered_rssi_values:
+            connected_rssi[key]['mean'] = statistics.mean(filtered_rssi_values)
+            if len(filtered_rssi_values) > 1:
+                connected_rssi[key]['stdev'] = statistics.stdev(
+                    filtered_rssi_values)
+            else:
+                connected_rssi[key]['stdev'] = 0
+        else:
+            connected_rssi[key]['mean'] = RSSI_ERROR_VAL
+            connected_rssi[key]['stdev'] = RSSI_ERROR_VAL
+
+    return connected_rssi
+
+
+def get_scan_rssi(dut, tracked_bssids, num_measurements=1):
+    scan_rssi = collections.OrderedDict()
+    for bssid in tracked_bssids:
+        scan_rssi[bssid] = empty_rssi_result()
+    for idx in range(num_measurements):
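+        # Trigger a fresh scan and wait before fetching the scan results.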
+        scan_output = dut.adb.shell('cmd wifi start-scan')
+        time.sleep(MED_SLEEP)
+        scan_output = dut.adb.shell('cmd wifi list-scan-results')
+        for bssid in tracked_bssids:
+            bssid_result = re.search(bssid + '.*',
+                                     scan_output,
+                                     flags=re.IGNORECASE)
+            if bssid_result:
+                bssid_result = bssid_result.group(0).split()
+                scan_rssi[bssid]['data'].append(int(bssid_result[2]))
+            else:
+                scan_rssi[bssid]['data'].append(RSSI_ERROR_VAL)
+    # Compute mean RSSIs. Only average valid readings.
+    # Output RSSI_ERROR_VAL if no readings found.
+    for key, val in scan_rssi.items():
+        filtered_rssi_values = [x for x in val['data'] if not math.isnan(x)]
+        if filtered_rssi_values:
+            scan_rssi[key]['mean'] = statistics.mean(filtered_rssi_values)
+            if len(filtered_rssi_values) > 1:
+                scan_rssi[key]['stdev'] = statistics.stdev(
+                    filtered_rssi_values)
+            else:
+                scan_rssi[key]['stdev'] = 0
+        else:
+            scan_rssi[key]['mean'] = RSSI_ERROR_VAL
+            scan_rssi[key]['stdev'] = RSSI_ERROR_VAL
+    return scan_rssi
+
+
+def get_sw_signature(dut):
+    bdf_output = dut.adb.shell('cksum /vendor/firmware/bcmdhd*')
+    logging.debug('BDF Checksum output: {}'.format(bdf_output))
+    bdf_signature = sum(
+        [int(line.split(' ')[0]) for line in bdf_output.splitlines()]) % 1000
+
+    fw_version = dut.adb.shell('getprop vendor.wlan.firmware.version')
+    driver_version = dut.adb.shell('getprop vendor.wlan.driver.version')
+    logging.debug('Firmware version : {}. Driver version: {}'.format(
+        fw_version, driver_version))
+    fw_signature = '{}+{}'.format(fw_version, driver_version)
+    fw_signature = int(hashlib.md5(fw_signature.encode()).hexdigest(),
+                       16) % 1000
+    serial_hash = int(hashlib.md5(dut.serial.encode()).hexdigest(), 16) % 1000
+    return {
+        'config_signature': bdf_signature,
+        'fw_signature': fw_signature,
+        'serial_hash': serial_hash
+    }
+
+
+def get_country_code(dut):
+    try:
+        country_code = dut.adb.shell('wl country').split(' ')[0]
+    except:
+        country_code = 'XZ'
+    if country_code == 'XZ':
+        country_code = 'WW'
+    logging.debug('Country code: {}'.format(country_code))
+    return country_code
+
+
+def push_config(dut, config_file):
+    config_files_list = dut.adb.shell('ls /vendor/etc/*.cal').splitlines()
+    for dst_file in config_files_list:
+        dut.push_system_file(config_file, dst_file)
+    dut.reboot()
+
+
+def start_wifi_logging(dut):
+    pass
+
+
+def stop_wifi_logging(dut):
+    pass
+
+
+def push_firmware(dut, firmware_files):
+    """Function to push Wifi firmware files
+
+    Args:
+        dut: dut to push firmware files to
+        firmware_files: paths to firmware files to push, e.g., wlanmdsp.mbn
+    """
+    for file in firmware_files:
+        dut.push_system_file(file, '/vendor/firmware/')
+    dut.reboot()
+
+
+def disable_beamforming(dut):
+    dut.adb.shell('wl txbf 0')
+
+
+def set_nss_capability(dut, nss):
+    dut.adb.shell('wl he omi -r {} -t {}'.format(nss, nss))
+
+
+def set_chain_mask(dut, chain):
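+    # Map the requested setting to a chain bitmask: '2x2' -> 0x3 (both
+    # chains), 0 -> 0x1 (chain 0 only), 1 -> 0x2 (chain 1 only).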
+    if chain == '2x2':
+        chain = 3
+    else:
+        chain = chain + 1
+    # Get current chain mask
+    try:
+        curr_tx_chain = int(dut.adb.shell('wl txchain'))
+        curr_rx_chain = int(dut.adb.shell('wl rxchain'))
+    except:
+        curr_tx_chain = -1
+        curr_rx_chain = -1
+    if curr_tx_chain == chain and curr_rx_chain == chain:
+        return
+    # Set chain mask if needed
+    dut.adb.shell('wl down')
+    time.sleep(VERY_SHORT_SLEEP)
+    dut.adb.shell('wl txchain 0x{}'.format(chain))
+    dut.adb.shell('wl rxchain 0x{}'.format(chain))
+    dut.adb.shell('wl up')
+
+
+class LinkLayerStats():
+
+    LLSTATS_CMD = 'wl dump ampdu; wl counters;'
+    LL_STATS_CLEAR_CMD = 'wl dump_clear ampdu; wl reset_cnts;'
+    BW_REGEX = re.compile(r'Chanspec:.+ (?P<bandwidth>[0-9]+)MHz')
+    MCS_REGEX = re.compile(r'(?P<count>[0-9]+)\((?P<percent>[0-9]+)%\)')
+    RX_REGEX = re.compile(r'RX (?P<mode>\S+)\s+:\s*(?P<nss1>[0-9, ,(,),%]*)'
+                          r'\n\s*:?\s*(?P<nss2>[0-9, ,(,),%]*)')
+    TX_REGEX = re.compile(r'TX (?P<mode>\S+)\s+:\s*(?P<nss1>[0-9, ,(,),%]*)'
+                          r'\n\s*:?\s*(?P<nss2>[0-9, ,(,),%]*)')
+    TX_PER_REGEX = re.compile(
+        r'(?P<mode>\S+) PER\s+:\s*(?P<nss1>[0-9, ,(,),%]*)'
+        r'\n\s*:?\s*(?P<nss2>[0-9, ,(,),%]*)')
+    RX_FCS_REGEX = re.compile(
+        r'rxbadfcs (?P<rx_bad_fcs>[0-9]*).+\n.+goodfcs (?P<rx_good_fcs>[0-9]*)'
+    )
+    RX_AGG_REGEX = re.compile(r'rxmpduperampdu (?P<aggregation>[0-9]*)')
+    TX_AGG_REGEX = re.compile(r' mpduperampdu (?P<aggregation>[0-9]*)')
+    TX_AGG_STOP_REGEX = re.compile(
+        r'agg stop reason: tot_agg_tried (?P<agg_tried>[0-9]+) agg_txcancel (?P<agg_canceled>[0-9]+) (?P<agg_stop_reason>.+)'
+    )
+    TX_AGG_STOP_REASON_REGEX = re.compile(
+        r'(?P<reason>\w+) [0-9]+ \((?P<value>[0-9]+%)\)')
+    MCS_ID = collections.namedtuple(
+        'mcs_id', ['mode', 'num_streams', 'bandwidth', 'mcs', 'gi'])
+    MODE_MAP = {'0': '11a/g', '1': '11b', '2': '11n', '3': '11ac'}
+    BW_MAP = {'0': 20, '1': 40, '2': 80}
+
+    def __init__(self, dut, llstats_enabled=True):
+        self.dut = dut
+        self.llstats_enabled = llstats_enabled
+        self.llstats_cumulative = self._empty_llstats()
+        self.llstats_incremental = self._empty_llstats()
+
+    def update_stats(self):
+        if self.llstats_enabled:
+            try:
+                llstats_output = self.dut.adb.shell(self.LLSTATS_CMD,
+                                                    timeout=1)
+                self.dut.adb.shell_nb(self.LL_STATS_CLEAR_CMD)
+
+                wl_join = self.dut.adb.shell("wl status")
+                self.bandwidth = int(
+                    re.search(self.BW_REGEX, wl_join).group('bandwidth'))
+            except:
+                llstats_output = ''
+        else:
+            llstats_output = ''
+        self._update_stats(llstats_output)
+
+    def reset_stats(self):
+        self.llstats_cumulative = self._empty_llstats()
+        self.llstats_incremental = self._empty_llstats()
+
+    def _empty_llstats(self):
+        return collections.OrderedDict(mcs_stats=collections.OrderedDict(),
+                                       mpdu_stats=collections.OrderedDict(),
+                                       summary=collections.OrderedDict())
+
+    def _empty_mcs_stat(self):
+        return collections.OrderedDict(txmpdu=0,
+                                       rxmpdu=0,
+                                       mpdu_lost=0,
+                                       retries=0,
+                                       retries_short=0,
+                                       retries_long=0)
+
+    def _mcs_id_to_string(self, mcs_id):
+        mcs_string = '{} Nss{} MCS{} GI{}'.format(mcs_id.mode,
+                                                  mcs_id.num_streams,
+                                                  mcs_id.mcs, mcs_id.gi)
+        return mcs_string
+
+    def _parse_mcs_stats(self, llstats_output):
+        llstats_dict = {}
+        # Look for per-peer stats
+        match = re.search(self.RX_REGEX, llstats_output)
+        if not match:
+            self.reset_stats()
+            return collections.OrderedDict()
+        # Find and process all matches for per stream stats
+        rx_match_iter = re.finditer(self.RX_REGEX, llstats_output)
+        tx_match_iter = re.finditer(self.TX_REGEX, llstats_output)
+        tx_per_match_iter = re.finditer(self.TX_PER_REGEX, llstats_output)
+        for rx_match, tx_match, tx_per_match in zip(rx_match_iter,
+                                                    tx_match_iter,
+                                                    tx_per_match_iter):
+            mode = rx_match.group('mode')
+            mode = 'HT' if mode == 'MCS' else mode
+            for nss in [1, 2]:
+                rx_mcs_iter = re.finditer(self.MCS_REGEX,
+                                          rx_match.group(nss + 1))
+                tx_mcs_iter = re.finditer(self.MCS_REGEX,
+                                          tx_match.group(nss + 1))
+                tx_per_iter = re.finditer(self.MCS_REGEX,
+                                          tx_per_match.group(nss + 1))
+                for mcs, (rx_mcs_stats, tx_mcs_stats,
+                          tx_per_mcs_stats) in enumerate(
+                              itertools.zip_longest(rx_mcs_iter, tx_mcs_iter,
+                                                    tx_per_iter)):
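+                    # HT (11n) reports the second spatial stream as MCS 8-15,
+                    # so offset the MCS index by 8 when nss is 2. The GI field
+                    # is not parsed here and is reported as 0.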
+                    current_mcs = self.MCS_ID(
+                        mode, nss, self.bandwidth,
+                        mcs + int(8 * (mode == 'HT') * (nss - 1)), 0)
+                    current_stats = collections.OrderedDict(
+                        txmpdu=int(tx_mcs_stats.group('count'))
+                        if tx_mcs_stats else 0,
+                        rxmpdu=int(rx_mcs_stats.group('count'))
+                        if rx_mcs_stats else 0,
+                        mpdu_lost=0,
+                        retries=int(tx_per_mcs_stats.group('count'))
+                        if tx_per_mcs_stats else 0,
+                        retries_short=0,
+                        retries_long=0,
+                        mcs_id=current_mcs)
+                    llstats_dict[self._mcs_id_to_string(
+                        current_mcs)] = current_stats
+        return llstats_dict
+
+    def _parse_mpdu_stats(self, llstats_output):
+        rx_agg_match = re.search(self.RX_AGG_REGEX, llstats_output)
+        tx_agg_match = re.search(self.TX_AGG_REGEX, llstats_output)
+        tx_agg_stop_match = re.search(self.TX_AGG_STOP_REGEX, llstats_output)
+        rx_fcs_match = re.search(self.RX_FCS_REGEX, llstats_output)
+
+        if rx_agg_match and tx_agg_match and tx_agg_stop_match and rx_fcs_match:
+            agg_stop_dict = collections.OrderedDict(
+                rx_aggregation=int(rx_agg_match.group('aggregation')),
+                tx_aggregation=int(tx_agg_match.group('aggregation')),
+                tx_agg_tried=int(tx_agg_stop_match.group('agg_tried')),
+                tx_agg_canceled=int(tx_agg_stop_match.group('agg_canceled')),
+                rx_good_fcs=int(rx_fcs_match.group('rx_good_fcs')),
+                rx_bad_fcs=int(rx_fcs_match.group('rx_bad_fcs')),
+                agg_stop_reason=collections.OrderedDict())
+            agg_reason_match = re.finditer(
+                self.TX_AGG_STOP_REASON_REGEX,
+                tx_agg_stop_match.group('agg_stop_reason'))
+            for reason_match in agg_reason_match:
+                agg_stop_dict['agg_stop_reason'][reason_match.group(
+                    'reason')] = reason_match.group('value')
+
+        else:
+            agg_stop_dict = collections.OrderedDict(rx_aggregation=0,
+                                                    tx_aggregation=0,
+                                                    tx_agg_tried=0,
+                                                    tx_agg_canceled=0,
+                                                    rx_good_fcs=0,
+                                                    rx_bad_fcs=0,
+                                                    agg_stop_reason=None)
+        return agg_stop_dict
+
+    def _generate_stats_summary(self, llstats_dict):
+        llstats_summary = collections.OrderedDict(common_tx_mcs=None,
+                                                  common_tx_mcs_count=0,
+                                                  common_tx_mcs_freq=0,
+                                                  common_rx_mcs=None,
+                                                  common_rx_mcs_count=0,
+                                                  common_rx_mcs_freq=0,
+                                                  rx_per=float('nan'))
+        mcs_ids = []
+        tx_mpdu = []
+        rx_mpdu = []
+        phy_rates = []
+        for mcs_str, mcs_stats in llstats_dict['mcs_stats'].items():
+            mcs_id = mcs_stats['mcs_id']
+            mcs_ids.append(mcs_str)
+            tx_mpdu.append(mcs_stats['txmpdu'])
+            rx_mpdu.append(mcs_stats['rxmpdu'])
+            phy_rates.append(RATE_TABLE[mcs_id.mode][mcs_id.num_streams][
+                mcs_id.bandwidth][mcs_id.mcs])
+        if len(tx_mpdu) == 0 or len(rx_mpdu) == 0:
+            return llstats_summary
+        llstats_summary['common_tx_mcs'] = mcs_ids[numpy.argmax(tx_mpdu)]
+        llstats_summary['common_tx_mcs_count'] = numpy.max(tx_mpdu)
+        llstats_summary['common_rx_mcs'] = mcs_ids[numpy.argmax(rx_mpdu)]
+        llstats_summary['common_rx_mcs_count'] = numpy.max(rx_mpdu)
+        if sum(tx_mpdu) and sum(rx_mpdu):
+            llstats_summary['mean_tx_phy_rate'] = numpy.average(
+                phy_rates, weights=tx_mpdu)
+            llstats_summary['mean_rx_phy_rate'] = numpy.average(
+                phy_rates, weights=rx_mpdu)
+            llstats_summary['common_tx_mcs_freq'] = (
+                llstats_summary['common_tx_mcs_count'] / sum(tx_mpdu))
+            llstats_summary['common_rx_mcs_freq'] = (
+                llstats_summary['common_rx_mcs_count'] / sum(rx_mpdu))
+            total_rx_frames = llstats_dict['mpdu_stats'][
+                'rx_good_fcs'] + llstats_dict['mpdu_stats']['rx_bad_fcs']
+            if total_rx_frames:
+                llstats_summary['rx_per'] = (
+                    llstats_dict['mpdu_stats']['rx_bad_fcs'] /
+                    (total_rx_frames)) * 100
+        return llstats_summary
+
+    def _update_stats(self, llstats_output):
+        self.llstats_cumulative = self._empty_llstats()
+        self.llstats_incremental = self._empty_llstats()
+        self.llstats_incremental['raw_output'] = llstats_output
+        self.llstats_incremental['mcs_stats'] = self._parse_mcs_stats(
+            llstats_output)
+        self.llstats_incremental['mpdu_stats'] = self._parse_mpdu_stats(
+            llstats_output)
+        self.llstats_incremental['summary'] = self._generate_stats_summary(
+            self.llstats_incremental)
+        self.llstats_cumulative['summary'] = self._generate_stats_summary(
+            self.llstats_cumulative)
diff --git a/acts_tests/acts_contrib/test_utils/wifi/wifi_performance_test_utils/ping_utils.py b/acts_tests/acts_contrib/test_utils/wifi/wifi_performance_test_utils/ping_utils.py
new file mode 100644
index 0000000..7d2e09f
--- /dev/null
+++ b/acts_tests/acts_contrib/test_utils/wifi/wifi_performance_test_utils/ping_utils.py
@@ -0,0 +1,125 @@
+#!/usr/bin/env python3.4
+#
+#   Copyright 2021 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the 'License');
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an 'AS IS' BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+import re
+
+RTT_REGEX = re.compile(r'^\[(?P<timestamp>\S+)\] .*? time=(?P<rtt>\S+)')
+LOSS_REGEX = re.compile(r'(?P<loss>\S+)% packet loss')
+
+
+class PingResult(object):
+    """An object that contains the results of running ping command.
+
+    Attributes:
+        connected: True if a connection was made. False otherwise.
+        packet_loss_percentage: The total percentage of packets lost.
+        transmission_times: The list of PingTransmissionTimes containing the
+            timestamps gathered for transmitted packets.
+        rtts: A list-like object enumerating all round-trip times of
+            transmitted packets.
+        timestamps: A list-like object enumerating the beginning timestamps of
+            each packet transmission.
+        ping_interarrivals: A list-like object enumerating the amount of time
+            between the beginning of each subsequent transmission.
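+
+    Example (illustrative; ping_output is an iterable of raw ping stdout
+    lines):
+        result = PingResult(ping_output)
+        mean_rtt = sum(result.rtts) / max(len(result.rtts), 1)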
+    """
+    def __init__(self, ping_output):
+        self.packet_loss_percentage = 100
+        self.transmission_times = []
+
+        self.rtts = _ListWrap(self.transmission_times, lambda entry: entry.rtt)
+        self.timestamps = _ListWrap(self.transmission_times,
+                                    lambda entry: entry.timestamp)
+        self.ping_interarrivals = _PingInterarrivals(self.transmission_times)
+
+        self.start_time = 0
+        for line in ping_output:
+            if 'loss' in line:
+                match = re.search(LOSS_REGEX, line)
+                self.packet_loss_percentage = float(match.group('loss'))
+            if 'time=' in line:
+                match = re.search(RTT_REGEX, line)
+                if self.start_time == 0:
+                    self.start_time = float(match.group('timestamp'))
+                self.transmission_times.append(
+                    PingTransmissionTimes(
+                        float(match.group('timestamp')) - self.start_time,
+                        float(match.group('rtt'))))
+        self.connected = len(
+            ping_output) > 1 and self.packet_loss_percentage < 100
+
+    def __getitem__(self, item):
+        if item == 'rtt':
+            return self.rtts
+        if item == 'connected':
+            return self.connected
+        if item == 'packet_loss_percentage':
+            return self.packet_loss_percentage
+        raise ValueError('Invalid key. Please use an attribute instead.')
+
+    def as_dict(self):
+        return {
+            'connected': 1 if self.connected else 0,
+            'rtt': list(self.rtts),
+            'time_stamp': list(self.timestamps),
+            'ping_interarrivals': list(self.ping_interarrivals),
+            'packet_loss_percentage': self.packet_loss_percentage
+        }
+
+
+class PingTransmissionTimes(object):
+    """A class that holds the timestamps for a packet sent via the ping command.
+
+    Attributes:
+        rtt: The round trip time for the packet sent.
+        timestamp: The timestamp the packet started its trip.
+    """
+    def __init__(self, timestamp, rtt):
+        self.rtt = rtt
+        self.timestamp = timestamp
+
+
+class _ListWrap(object):
+    """A convenient helper class for treating list iterators as native lists."""
+    def __init__(self, wrapped_list, func):
+        self.__wrapped_list = wrapped_list
+        self.__func = func
+
+    def __getitem__(self, key):
+        return self.__func(self.__wrapped_list[key])
+
+    def __iter__(self):
+        for item in self.__wrapped_list:
+            yield self.__func(item)
+
+    def __len__(self):
+        return len(self.__wrapped_list)
+
+
+class _PingInterarrivals(object):
+    """A helper class for treating ping interarrivals as a native list."""
+    def __init__(self, ping_entries):
+        self.__ping_entries = ping_entries
+
+    def __getitem__(self, key):
+        return (self.__ping_entries[key + 1].timestamp -
+                self.__ping_entries[key].timestamp)
+
+    def __iter__(self):
+        for index in range(len(self.__ping_entries) - 1):
+            yield self[index]
+
+    def __len__(self):
+        return max(0, len(self.__ping_entries) - 1)
diff --git a/acts_tests/acts_contrib/test_utils/wifi/wifi_performance_test_utils/qcom_utils.py b/acts_tests/acts_contrib/test_utils/wifi/wifi_performance_test_utils/qcom_utils.py
new file mode 100644
index 0000000..53321bc
--- /dev/null
+++ b/acts_tests/acts_contrib/test_utils/wifi/wifi_performance_test_utils/qcom_utils.py
@@ -0,0 +1,467 @@
+#!/usr/bin/env python3.4
+#
+#   Copyright 2021 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the 'License');
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an 'AS IS' BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+import collections
+import hashlib
+import logging
+import math
+import os
+import re
+import statistics
+import time
+from acts import asserts
+
+SHORT_SLEEP = 1
+MED_SLEEP = 6
+STATION_DUMP = 'iw {} station dump'
+SCAN = 'wpa_cli scan'
+SCAN_RESULTS = 'wpa_cli scan_results'
+SIGNAL_POLL = 'wpa_cli signal_poll'
+WPA_CLI_STATUS = 'wpa_cli status'
+RSSI_ERROR_VAL = float('nan')
+FW_REGEX = re.compile(r'FW:(?P<firmware>\S+) HW:')
+
+
+# Rssi Utilities
+def empty_rssi_result():
+    return collections.OrderedDict([('data', []), ('mean', None),
+                                    ('stdev', None)])
+
+
+def get_connected_rssi(dut,
+                       num_measurements=1,
+                       polling_frequency=SHORT_SLEEP,
+                       first_measurement_delay=0,
+                       disconnect_warning=True,
+                       ignore_samples=0,
+                       interface='wlan0'):
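+    """Polls a DUT's connected RSSI over wpa_cli and iw.
+
+    Args:
+        dut: android device object from which to get RSSI.
+        num_measurements: number of RSSI measurements to collect.
+        polling_frequency: time to wait between RSSI measurements.
+        first_measurement_delay: time to wait before the first measurement.
+        disconnect_warning: if True, logs a warning when a disconnect is
+            detected between measurements.
+        ignore_samples: number of leading samples to ignore when computing
+            mean/stdev.
+        interface: wifi interface to query, e.g. wlan0.
+    Returns:
+        connected_rssi: dict containing the measurement results and summary
+            statistics for signal poll, average, and per-chain RSSI.
+    """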
+    # yapf: disable
+    connected_rssi = collections.OrderedDict(
+        [('time_stamp', []),
+         ('bssid', []), ('ssid', []), ('frequency', []),
+         ('signal_poll_rssi', empty_rssi_result()),
+         ('signal_poll_avg_rssi', empty_rssi_result()),
+         ('chain_0_rssi', empty_rssi_result()),
+         ('chain_1_rssi', empty_rssi_result())])
+    # yapf: enable
+    previous_bssid = 'disconnected'
+    t0 = time.time()
+    time.sleep(first_measurement_delay)
+    for idx in range(num_measurements):
+        measurement_start_time = time.time()
+        connected_rssi['time_stamp'].append(measurement_start_time - t0)
+        # Get signal poll RSSI
+        try:
+            status_output = dut.adb.shell(
+                'wpa_cli -i {} status'.format(interface))
+        except:
+            status_output = ''
+        match = re.search('bssid=.*', status_output)
+        if match:
+            current_bssid = match.group(0).split('=')[1]
+            connected_rssi['bssid'].append(current_bssid)
+        else:
+            current_bssid = 'disconnected'
+            connected_rssi['bssid'].append(current_bssid)
+            if disconnect_warning and previous_bssid != 'disconnected':
+                logging.warning('WIFI DISCONNECT DETECTED!')
+        previous_bssid = current_bssid
+        match = re.search(r'\s+ssid=.*', status_output)
+        if match:
+            ssid = match.group(0).split('=')[1]
+            connected_rssi['ssid'].append(ssid)
+        else:
+            connected_rssi['ssid'].append('disconnected')
+        try:
+            signal_poll_output = dut.adb.shell(
+                'wpa_cli -i {} signal_poll'.format(interface))
+        except:
+            signal_poll_output = ''
+        match = re.search('FREQUENCY=.*', signal_poll_output)
+        if match:
+            frequency = int(match.group(0).split('=')[1])
+            connected_rssi['frequency'].append(frequency)
+        else:
+            connected_rssi['frequency'].append(RSSI_ERROR_VAL)
+        match = re.search('RSSI=.*', signal_poll_output)
+        if match:
+            temp_rssi = int(match.group(0).split('=')[1])
+            if temp_rssi == -9999 or temp_rssi == 0:
+                connected_rssi['signal_poll_rssi']['data'].append(
+                    RSSI_ERROR_VAL)
+            else:
+                connected_rssi['signal_poll_rssi']['data'].append(temp_rssi)
+        else:
+            connected_rssi['signal_poll_rssi']['data'].append(RSSI_ERROR_VAL)
+        match = re.search('AVG_RSSI=.*', signal_poll_output)
+        if match:
+            connected_rssi['signal_poll_avg_rssi']['data'].append(
+                int(match.group(0).split('=')[1]))
+        else:
+            connected_rssi['signal_poll_avg_rssi']['data'].append(
+                RSSI_ERROR_VAL)
+
+        # Get per chain RSSI
+        try:
+            per_chain_rssi = dut.adb.shell(STATION_DUMP.format(interface))
+        except:
+            per_chain_rssi = ''
+        match = re.search('.*signal avg:.*', per_chain_rssi)
+        if match:
+            per_chain_rssi = per_chain_rssi[per_chain_rssi.find('[') +
+                                            1:per_chain_rssi.find(']')]
+            per_chain_rssi = per_chain_rssi.split(', ')
+            connected_rssi['chain_0_rssi']['data'].append(
+                int(per_chain_rssi[0]))
+            connected_rssi['chain_1_rssi']['data'].append(
+                int(per_chain_rssi[1]))
+        else:
+            connected_rssi['chain_0_rssi']['data'].append(RSSI_ERROR_VAL)
+            connected_rssi['chain_1_rssi']['data'].append(RSSI_ERROR_VAL)
+        measurement_elapsed_time = time.time() - measurement_start_time
+        time.sleep(max(0, polling_frequency - measurement_elapsed_time))
+
+    # Compute mean RSSIs. Only average valid readings.
+    # Output RSSI_ERROR_VAL if no valid connected readings found.
+    for key, val in connected_rssi.copy().items():
+        if 'data' not in val:
+            continue
+        filtered_rssi_values = [x for x in val['data'] if not math.isnan(x)]
+        if len(filtered_rssi_values) > ignore_samples:
+            filtered_rssi_values = filtered_rssi_values[ignore_samples:]
+        if filtered_rssi_values:
+            connected_rssi[key]['mean'] = statistics.mean(filtered_rssi_values)
+            if len(filtered_rssi_values) > 1:
+                connected_rssi[key]['stdev'] = statistics.stdev(
+                    filtered_rssi_values)
+            else:
+                connected_rssi[key]['stdev'] = 0
+        else:
+            connected_rssi[key]['mean'] = RSSI_ERROR_VAL
+            connected_rssi[key]['stdev'] = RSSI_ERROR_VAL
+    return connected_rssi
+
+
+def get_scan_rssi(dut, tracked_bssids, num_measurements=1):
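+    """Gets scan RSSI for a list of tracked BSSIDs.
+
+    Args:
+        dut: android device object from which to get RSSI.
+        tracked_bssids: list of BSSIDs to search for in scan results.
+        num_measurements: number of scans to run and average over.
+    Returns:
+        scan_rssi: dict mapping each tracked BSSID to its RSSI data, mean,
+            and stdev.
+    """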
+    scan_rssi = collections.OrderedDict()
+    for bssid in tracked_bssids:
+        scan_rssi[bssid] = empty_rssi_result()
+    for idx in range(num_measurements):
+        scan_output = dut.adb.shell(SCAN)
+        time.sleep(MED_SLEEP)
+        scan_output = dut.adb.shell(SCAN_RESULTS)
+        for bssid in tracked_bssids:
+            bssid_result = re.search(bssid + '.*',
+                                     scan_output,
+                                     flags=re.IGNORECASE)
+            if bssid_result:
+                bssid_result = bssid_result.group(0).split('\t')
+                scan_rssi[bssid]['data'].append(int(bssid_result[2]))
+            else:
+                scan_rssi[bssid]['data'].append(RSSI_ERROR_VAL)
+    # Compute mean RSSIs. Only average valid readings.
+    # Output RSSI_ERROR_VAL if no readings found.
+    for key, val in scan_rssi.items():
+        filtered_rssi_values = [x for x in val['data'] if not math.isnan(x)]
+        if filtered_rssi_values:
+            scan_rssi[key]['mean'] = statistics.mean(filtered_rssi_values)
+            if len(filtered_rssi_values) > 1:
+                scan_rssi[key]['stdev'] = statistics.stdev(
+                    filtered_rssi_values)
+            else:
+                scan_rssi[key]['stdev'] = 0
+        else:
+            scan_rssi[key]['mean'] = RSSI_ERROR_VAL
+            scan_rssi[key]['stdev'] = RSSI_ERROR_VAL
+    return scan_rssi
+
+
+def get_sw_signature(dut):
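+    """Extracts a software signature from the DUT.
+
+    The signature combines a checksum over the BDF config files, the firmware
+    version reported by halutil, and a hash of the device serial number.
+
+    Returns:
+        dict with config_signature, fw_signature, and serial_hash fields.
+    """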
+    bdf_output = dut.adb.shell('cksum /vendor/firmware/bdwlan*')
+    logging.debug('BDF Checksum output: {}'.format(bdf_output))
+    bdf_signature = sum(
+        [int(line.split(' ')[0]) for line in bdf_output.splitlines()]) % 1000
+
+    fw_output = dut.adb.shell('halutil -logger -get fw')
+    logging.debug('Firmware version output: {}'.format(fw_output))
+    fw_version = re.search(FW_REGEX, fw_output).group('firmware')
+    fw_signature = fw_version.split('.')[-3:-1]
+    fw_signature = float('.'.join(fw_signature))
+    serial_hash = int(hashlib.md5(dut.serial.encode()).hexdigest(), 16) % 1000
+    return {
+        'config_signature': bdf_signature,
+        'fw_signature': fw_signature,
+        'serial_hash': serial_hash
+    }
+
+
+def get_country_code(dut):
+    country_code = dut.adb.shell('iw reg get | grep country | head -1')
+    country_code = country_code.split(':')[0].split(' ')[1]
+    if country_code == '00':
+        country_code = 'WW'
+    return country_code
+
+
+def push_config(dut, config_file):
+    config_files_list = dut.adb.shell(
+        'ls /vendor/firmware/bdwlan*').splitlines()
+    for dst_file in config_files_list:
+        dut.push_system_file(config_file, dst_file)
+    dut.reboot()
+
+
+def start_wifi_logging(dut):
+    dut.droid.wifiEnableVerboseLogging(1)
+    msg = "Failed to enable WiFi verbose logging."
+    asserts.assert_equal(dut.droid.wifiGetVerboseLoggingLevel(), 1, msg)
+    logging.info('Starting CNSS logs')
+    dut.adb.shell("find /data/vendor/wifi/wlan_logs/ -type f -delete",
+                  ignore_status=True)
+    dut.adb.shell_nb('cnss_diag -f -s')
+
+
+def stop_wifi_logging(dut):
+    logging.info('Stopping CNSS logs')
+    dut.adb.shell('killall cnss_diag')
+    logs = dut.get_file_names("/data/vendor/wifi/wlan_logs/")
+    if logs:
+        dut.log.info("Pulling cnss_diag logs %s", logs)
+        log_path = os.path.join(dut.device_log_path,
+                                "CNSS_DIAG_%s" % dut.serial)
+        os.makedirs(log_path, exist_ok=True)
+        dut.pull_files(logs, log_path)
+
+
+def push_firmware(dut, firmware_files):
+    """Function to push Wifi firmware files
+
+    Args:
+        dut: dut to push bdf file to
+        firmware_files: path to wlanmdsp.mbn file
+        datamsc_file: path to Data.msc file
+    """
+    for file in firmware_files:
+        dut.push_system_file(file, '/vendor/firmware/')
+    dut.reboot()
+
+
+def _set_ini_fields(ini_file_path, ini_field_dict):
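+    """Rewrites matching key=value lines in a wlan ini file in place.
+
+    Illustrative example (field values are arbitrary):
+        _set_ini_fields('WCNSS_qcom_cfg.ini', {'gDot11Mode': 0})
+
+    Args:
+        ini_file_path: path of the ini file to edit.
+        ini_field_dict: dict of {field_name: field_value} pairs to write.
+    """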
+    template_regex = r'^{}=[0-9,.x-]+'
+    with open(ini_file_path, 'r') as f:
+        ini_lines = f.read().splitlines()
+        for idx, line in enumerate(ini_lines):
+            for field_name, field_value in ini_field_dict.items():
+                line_regex = re.compile(template_regex.format(field_name))
+                if re.match(line_regex, line):
+                    ini_lines[idx] = '{}={}'.format(field_name, field_value)
+                    print(ini_lines[idx])
+    with open(ini_file_path, 'w') as f:
+        f.write('\n'.join(ini_lines) + '\n')
+
+
+def _edit_dut_ini(dut, ini_fields):
+    """Function to edit Wifi ini files."""
+    dut_ini_path = '/vendor/firmware/wlan/qca_cld/WCNSS_qcom_cfg.ini'
+    local_ini_path = os.path.expanduser('~/WCNSS_qcom_cfg.ini')
+    dut.pull_files(dut_ini_path, local_ini_path)
+
+    _set_ini_fields(local_ini_path, ini_fields)
+
+    dut.push_system_file(local_ini_path, dut_ini_path)
+    dut.reboot()
+
+
+def set_chain_mask(dut, chain_mask):
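+    """Configures the DUT chain mask by editing ini fields.
+
+    Reboots the DUT if the chain mask changes.
+
+    Args:
+        dut: android device to configure.
+        chain_mask: '2x2' to enable both chains, or a chain index (0 or 1)
+            for single-chain operation (inferred from how the value is used
+            below).
+    """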
+    curr_mask = getattr(dut, 'chain_mask', '2x2')
+    if curr_mask == chain_mask:
+        return
+    dut.chain_mask = chain_mask
+    if chain_mask == '2x2':
+        ini_fields = {
+            'gEnable2x2': 2,
+            'gSetTxChainmask1x1': 1,
+            'gSetRxChainmask1x1': 1,
+            'gDualMacFeatureDisable': 6,
+            'gDot11Mode': 0
+        }
+    else:
+        ini_fields = {
+            'gEnable2x2': 0,
+            'gSetTxChainmask1x1': chain_mask + 1,
+            'gSetRxChainmask1x1': chain_mask + 1,
+            'gDualMacFeatureDisable': 1,
+            'gDot11Mode': 0
+        }
+    _edit_dut_ini(dut, ini_fields)
+
+
+def set_wifi_mode(dut, mode):
+    TX_MODE_DICT = {
+        'Auto': 0,
+        '11n': 4,
+        '11ac': 9,
+        '11abg': 1,
+        '11b': 2,
+        '11': 3,
+        '11g only': 5,
+        '11n only': 6,
+        '11b only': 7,
+        '11ac only': 8
+    }
+
+    ini_fields = {
+        'gEnable2x2': 2,
+        'gSetTxChainmask1x1': 1,
+        'gSetRxChainmask1x1': 1,
+        'gDualMacFeatureDisable': 6,
+        'gDot11Mode': TX_MODE_DICT[mode]
+    }
+    _edit_dut_ini(dut, ini_fields)
+
+
+class LinkLayerStats():
+
+    LLSTATS_CMD = 'cat /d/wlan0/ll_stats'
+    PEER_REGEX = 'LL_STATS_PEER_ALL'
+    MCS_REGEX = re.compile(
+        r'preamble: (?P<mode>\S+), nss: (?P<num_streams>\S+), bw: (?P<bw>\S+), '
+        'mcs: (?P<mcs>\S+), bitrate: (?P<rate>\S+), txmpdu: (?P<txmpdu>\S+), '
+        'rxmpdu: (?P<rxmpdu>\S+), mpdu_lost: (?P<mpdu_lost>\S+), '
+        'retries: (?P<retries>\S+), retries_short: (?P<retries_short>\S+), '
+        'retries_long: (?P<retries_long>\S+)')
+    MCS_ID = collections.namedtuple(
+        'mcs_id', ['mode', 'num_streams', 'bandwidth', 'mcs', 'rate'])
+    MODE_MAP = {'0': '11a/g', '1': '11b', '2': '11n', '3': '11ac'}
+    BW_MAP = {'0': 20, '1': 40, '2': 80}
+
+    def __init__(self, dut, llstats_enabled=True):
+        self.dut = dut
+        self.llstats_enabled = llstats_enabled
+        self.llstats_cumulative = self._empty_llstats()
+        self.llstats_incremental = self._empty_llstats()
+
+    def update_stats(self):
+        if self.llstats_enabled:
+            try:
+                llstats_output = self.dut.adb.shell(self.LLSTATS_CMD,
+                                                    timeout=0.1)
+            except:
+                llstats_output = ''
+        else:
+            llstats_output = ''
+        self._update_stats(llstats_output)
+
+    def reset_stats(self):
+        self.llstats_cumulative = self._empty_llstats()
+        self.llstats_incremental = self._empty_llstats()
+
+    def _empty_llstats(self):
+        return collections.OrderedDict(mcs_stats=collections.OrderedDict(),
+                                       summary=collections.OrderedDict())
+
+    def _empty_mcs_stat(self):
+        return collections.OrderedDict(txmpdu=0,
+                                       rxmpdu=0,
+                                       mpdu_lost=0,
+                                       retries=0,
+                                       retries_short=0,
+                                       retries_long=0)
+
+    def _mcs_id_to_string(self, mcs_id):
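+        # Example output (illustrative values):
+        #   MCS_ID('11ac', 2, 80, 9, 866.7) -> '11ac 80MHz Nss2 MCS9 866.7Mbps'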
+        mcs_string = '{} {}MHz Nss{} MCS{} {}Mbps'.format(
+            mcs_id.mode, mcs_id.bandwidth, mcs_id.num_streams, mcs_id.mcs,
+            mcs_id.rate)
+        return mcs_string
+
+    def _parse_mcs_stats(self, llstats_output):
+        llstats_dict = {}
+        # Look for per-peer stats
+        match = re.search(self.PEER_REGEX, llstats_output)
+        if not match:
+            self.reset_stats()
+            return collections.OrderedDict()
+        # Find and process all matches for per stream stats
+        match_iter = re.finditer(self.MCS_REGEX, llstats_output)
+        for match in match_iter:
+            current_mcs = self.MCS_ID(self.MODE_MAP[match.group('mode')],
+                                      int(match.group('num_streams')) + 1,
+                                      self.BW_MAP[match.group('bw')],
+                                      int(match.group('mcs')),
+                                      int(match.group('rate'), 16) / 1000)
+            current_stats = collections.OrderedDict(
+                txmpdu=int(match.group('txmpdu')),
+                rxmpdu=int(match.group('rxmpdu')),
+                mpdu_lost=int(match.group('mpdu_lost')),
+                retries=int(match.group('retries')),
+                retries_short=int(match.group('retries_short')),
+                retries_long=int(match.group('retries_long')))
+            llstats_dict[self._mcs_id_to_string(current_mcs)] = current_stats
+        return llstats_dict
+
+    def _diff_mcs_stats(self, new_stats, old_stats):
+        stats_diff = collections.OrderedDict()
+        for stat_key in new_stats.keys():
+            stats_diff[stat_key] = new_stats[stat_key] - old_stats[stat_key]
+        return stats_diff
+
+    def _generate_stats_summary(self, llstats_dict):
+        llstats_summary = collections.OrderedDict(common_tx_mcs=None,
+                                                  common_tx_mcs_count=0,
+                                                  common_tx_mcs_freq=0,
+                                                  common_rx_mcs=None,
+                                                  common_rx_mcs_count=0,
+                                                  common_rx_mcs_freq=0,
+                                                  rx_per=float('nan'))
+
+        txmpdu_count = 0
+        rxmpdu_count = 0
+        for mcs_id, mcs_stats in llstats_dict['mcs_stats'].items():
+            if mcs_stats['txmpdu'] > llstats_summary['common_tx_mcs_count']:
+                llstats_summary['common_tx_mcs'] = mcs_id
+                llstats_summary['common_tx_mcs_count'] = mcs_stats['txmpdu']
+            if mcs_stats['rxmpdu'] > llstats_summary['common_rx_mcs_count']:
+                llstats_summary['common_rx_mcs'] = mcs_id
+                llstats_summary['common_rx_mcs_count'] = mcs_stats['rxmpdu']
+            txmpdu_count += mcs_stats['txmpdu']
+            rxmpdu_count += mcs_stats['rxmpdu']
+        if txmpdu_count:
+            llstats_summary['common_tx_mcs_freq'] = (
+                llstats_summary['common_tx_mcs_count'] / txmpdu_count)
+        if rxmpdu_count:
+            llstats_summary['common_rx_mcs_freq'] = (
+                llstats_summary['common_rx_mcs_count'] / rxmpdu_count)
+        return llstats_summary
+
+    def _update_stats(self, llstats_output):
+        # Parse stats
+        new_llstats = self._empty_llstats()
+        new_llstats['mcs_stats'] = self._parse_mcs_stats(llstats_output)
+        # Save old stats and set new cumulative stats
+        old_llstats = self.llstats_cumulative.copy()
+        self.llstats_cumulative = new_llstats.copy()
+        # Compute difference between new and old stats
+        self.llstats_incremental = self._empty_llstats()
+        for mcs_id, new_mcs_stats in new_llstats['mcs_stats'].items():
+            old_mcs_stats = old_llstats['mcs_stats'].get(
+                mcs_id, self._empty_mcs_stat())
+            self.llstats_incremental['mcs_stats'][
+                mcs_id] = self._diff_mcs_stats(new_mcs_stats, old_mcs_stats)
+        # Generate llstats summary
+        self.llstats_incremental['summary'] = self._generate_stats_summary(
+            self.llstats_incremental)
+        self.llstats_cumulative['summary'] = self._generate_stats_summary(
+            self.llstats_cumulative)
diff --git a/acts_tests/acts_contrib/test_utils/wifi/wifi_power_test_utils.py b/acts_tests/acts_contrib/test_utils/wifi/wifi_power_test_utils.py
index b1565d2..5c607ca 100644
--- a/acts_tests/acts_contrib/test_utils/wifi/wifi_power_test_utils.py
+++ b/acts_tests/acts_contrib/test_utils/wifi/wifi_power_test_utils.py
@@ -20,6 +20,7 @@
 from acts.libs.proc import job
 from acts.controllers.ap_lib import bridge_interface as bi
 from acts_contrib.test_utils.wifi import wifi_test_utils as wutils
+from acts.controllers.adb_lib.error import AdbCommandError
 from acts.controllers.ap_lib import hostapd_security
 from acts.controllers.ap_lib import hostapd_ap_preset
 
@@ -41,8 +42,23 @@
         gEnableModulatedDTIM: Modulated DTIM, int
         gMaxLIModulatedDTIM: Maximum modulated DTIM, int
     """
-    # First trying to find the ini file with DTIM settings
-    ini_file_phone = ad.adb.shell('ls /vendor/firmware/wlan/*/*.ini')
+    ad.log.info('Setting DTIM to {}'.format(gEnableModulatedDTIM))
+
+    # On P21 devices the DTIM setting method changed, and listing the ini file
+    # raises an AdbCommandError. Try the legacy ini-file method first; if it
+    # fails, fall back to change_dtim_adb, which requires an active wifi
+    # connection.
+    try:
+        ini_file_phone = ad.adb.shell('ls /vendor/firmware/wlan/*/*.ini')
+    except AdbCommandError:
+        # Legacy method unavailable; change DTIM with change_dtim_adb instead.
+        ad.log.info('Got AdbCommandError, changing DTIM with change_dtim_adb.')
+        change_dtim_adb(ad, gEnableModulatedDTIM)
+        return 0
+
     ini_file_local = ini_file_phone.split('/')[-1]
 
     # Pull the file and change the DTIM to desired value
@@ -81,6 +97,59 @@
     ad.log.info('DTIM updated and device back from reboot')
     return 1
 
+def change_dtim_adb(ad, gEnableModulatedDTIM):
+    """Function to change the DTIM setting in the P21 phone.
+
+        This method should be run after connecting wifi.
+
+    Args:
+        ad: the target android device, AndroidDevice object
+        gEnableModulatedDTIM: Modulated DTIM, int
+    """
+    ad.log.info('Changes DTIM to {} with adb'.format(gEnableModulatedDTIM))
+    ad.adb.root()
+    screen_status = ad.adb.shell('dumpsys nfc | grep Screen')
+    screen_is_on = 'ON_UNLOCKED' in screen_status
+
+    # To read the dtim with 'adb shell wl bcn_li_dtim', the screen should be off
+    if screen_is_on:
+        ad.log.info('The screen is on. Turning it off before changing DTIM')
+        ad.droid.goToSleepNow()
+        time_limit_seconds = 60
+        _wait_screen_off(ad, time_limit_seconds)
+
+    old_dtim = ad.adb.shell('wl bcn_li_dtim')
+    ad.log.info('The dtim before change is {}'.format(old_dtim))
+    if int(old_dtim) == gEnableModulatedDTIM:
+        ad.log.info('Current DTIM is already the desired value, '
+                    'no need to reset it')
+        if screen_is_on:
+            ad.log.info('Restoring the screen to its original on state')
+            ad.droid.wakeUpNow()
+        return
+    current_dtim = _set_dtim(ad, gEnableModulatedDTIM)
+    ad.log.info(
+        'Old DTIM is {}, current DTIM is {}'.format(old_dtim, current_dtim))
+    if screen_is_on:
+        ad.log.info('Restoring the screen to its original on state')
+        ad.droid.wakeUpNow()
+
+def _set_dtim(ad, gEnableModulatedDTIM):
+    ad.adb.shell("halutil -dtim_config {}".format(gEnableModulatedDTIM))
+    return ad.adb.shell('wl bcn_li_dtim')
+
+
+def _wait_screen_off(ad, time_limit_seconds):
+    seconds_left = time_limit_seconds
+    while seconds_left > 0:
+        screen_status = ad.adb.shell('dumpsys nfc | grep Screen')
+        if 'OFF_UNLOCKED' in screen_status:
+            ad.log.info('The screen status is {}'.format(screen_status))
+            return
+        time.sleep(1)
+        seconds_left -= 1
+    raise TimeoutError('Timed out while waiting for the screen to turn off '
+                       'after {} seconds.'.format(time_limit_seconds))
+
 
 def push_file_to_phone(ad, file_local, file_phone):
     """Function to push local file to android phone.
@@ -104,7 +173,7 @@
     ad.adb.push('{} {}'.format(file_local, file_phone))
 
 
-def ap_setup(ap, network, bandwidth=80):
+def ap_setup(ap, network, bandwidth=80, dtim_period=None):
     """Set up the whirlwind AP with provided network info.
 
     Args:
@@ -112,6 +181,7 @@
         network: dict with information of the network, including ssid, password
                  bssid, channel etc.
         bandwidth: the operation bandwidth for the AP, default 80MHz
+        dtim_period: the DTIM period of the access point
     Returns:
         brconfigs: the bridge interface configs
     """
@@ -128,6 +198,7 @@
     config = hostapd_ap_preset.create_ap_preset(
         channel=channel,
         ssid=ssid,
+        dtim_period=dtim_period,
         security=security,
         bss_settings=bss_settings,
         vht_bandwidth=bandwidth,
diff --git a/acts_tests/acts_contrib/test_utils/wifi/wifi_retail_ap/__init__.py b/acts_tests/acts_contrib/test_utils/wifi/wifi_retail_ap/__init__.py
index 572a238..16f7a1d 100644
--- a/acts_tests/acts_contrib/test_utils/wifi/wifi_retail_ap/__init__.py
+++ b/acts_tests/acts_contrib/test_utils/wifi/wifi_retail_ap/__init__.py
@@ -51,8 +51,8 @@
             'package': 'netgear_r7500'
         },
         ('Netgear', 'R7500NA'): {
-        'name': 'NetgearR7500NAAP',
-        'package': 'netgear_r7500'
+            'name': 'NetgearR7500NAAP',
+            'package': 'netgear_r7500'
         },
         ('Netgear', 'R7800'): {
             'name': 'NetgearR7800AP',
@@ -66,13 +66,21 @@
             'name': 'NetgearRAX80AP',
             'package': 'netgear_rax80'
         },
+        ('Netgear', 'RAX120'): {
+            'name': 'NetgearRAX120AP',
+            'package': 'netgear_rax120'
+        },
         ('Netgear', 'RAX200'): {
             'name': 'NetgearRAX200AP',
             'package': 'netgear_rax200'
         },
-        ('Netgear', 'RAX120'): {
-            'name': 'NetgearRAX120AP',
-            'package': 'netgear_rax120'
+        ('Netgear', 'RAXE500'): {
+            'name': 'NetgearRAXE500AP',
+            'package': 'netgear_raxe500'
+        },
+        ('Brcm', 'Reference'): {
+            'name': 'BrcmRefAP',
+            'package': 'brcm_ref'
         },
         ('Google', 'Wifi'): {
             'name': 'GoogleWifiAP',
@@ -257,7 +265,8 @@
 
     def teardown(self):
         """Function to perform destroy operations."""
-        self._unlock_ap()
+        if self.ap_settings.get('lock_ap', 0):
+            self._unlock_ap()
 
     def reset(self):
         """Function that resets AP.
@@ -316,7 +325,9 @@
         Args:
             region: string indicating AP region
         """
-        self.log.warning('Updating region may overwrite wireless settings.')
+        if region != self.ap_settings['region']:
+            self.log.warning(
+                'Updating region may overwrite wireless settings.')
         setting_to_update = {'region': region}
         self.update_ap_settings(setting_to_update)
 
@@ -350,7 +361,7 @@
         if channel not in self.capabilities['channels'][network]:
             self.log.error('Ch{} is not supported on {} interface.'.format(
                 channel, network))
-        setting_to_update = {network: {'channel': str(channel)}}
+        setting_to_update = {network: {'channel': channel}}
         self.update_ap_settings(setting_to_update)
 
     def set_bandwidth(self, network, bandwidth):
@@ -363,10 +374,39 @@
         if 'bw' in bandwidth:
             bandwidth = bandwidth.replace('bw',
                                           self.capabilities['default_mode'])
+        elif isinstance(bandwidth, int):
+            bandwidth = str(bandwidth) + self.capabilities['default_mode']
         if bandwidth not in self.capabilities['modes'][network]:
             self.log.error('{} mode is not supported on {} interface.'.format(
                 bandwidth, network))
-        setting_to_update = {network: {'bandwidth': str(bandwidth)}}
+        setting_to_update = {network: {'bandwidth': bandwidth}}
+        self.update_ap_settings(setting_to_update)
+
+    def set_channel_and_bandwidth(self, network, channel, bandwidth):
+        """Function that sets network bandwidth/mode and channel.
+
+        Args:
+            network: string containing network identifier (2G, 5G_1, 5G_2)
+            channel: string containing desired channel
+            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
+        """
+        if 'bw' in bandwidth:
+            bandwidth = bandwidth.replace('bw',
+                                          self.capabilities['default_mode'])
+        elif isinstance(bandwidth, int):
+            bandwidth = str(bandwidth) + self.capabilities['default_mode']
+        if bandwidth not in self.capabilities['modes'][network]:
+            self.log.error('{} mode is not supported on {} interface.'.format(
+                bandwidth, network))
+        if channel not in self.capabilities['channels'][network]:
+            self.log.error('Ch{} is not supported on {} interface.'.format(
+                channel, network))
+        setting_to_update = {
+            network: {
+                'bandwidth': bandwidth,
+                'channel': channel
+            }
+        }
         self.update_ap_settings(setting_to_update)
 
     def set_power(self, network, power):
@@ -379,7 +419,7 @@
         if 'power' not in self.ap_settings[network].keys():
             self.log.error(
                 'Cannot configure power on {} interface.'.format(network))
-        setting_to_update = {network: {'power': str(power)}}
+        setting_to_update = {network: {'power': power}}
         self.update_ap_settings(setting_to_update)
 
     def set_security(self, network, security_type, *password):
@@ -463,12 +503,13 @@
         Args:
             channel: channel number to lookup
         Returns:
-            band: name of band which this channel belongs to on this ap
+            band: name of band which this channel belongs to on this ap,
+                False if not supported
         """
         for key, value in self.capabilities['channels'].items():
             if channel in value:
                 return key
-        raise ValueError('Invalid channel passed in argument.')
+        return False
 
     def _get_control_ip_address(self):
         """Function to get AP's Control Interface IP address."""
@@ -502,5 +543,9 @@
         """Function to unlock the AP when tests are done."""
         self.log.info('Releasing AP lock.')
         if hasattr(self, 'lock_file'):
-            fcntl.flock(self.lock_file, fcntl.LOCK_UN)
-            self.lock_file.close()
+            try:
+                fcntl.flock(self.lock_file, fcntl.LOCK_UN)
+                self.lock_file.close()
+                self.log.info('Successfully released AP lock file.')
+            except Exception as e:
+                raise RuntimeError(
+                    'Error occurred while unlocking AP: {}'.format(e))
\ No newline at end of file
diff --git a/acts_tests/acts_contrib/test_utils/wifi/wifi_retail_ap/brcm_ref.py b/acts_tests/acts_contrib/test_utils/wifi/wifi_retail_ap/brcm_ref.py
new file mode 100644
index 0000000..1b09533
--- /dev/null
+++ b/acts_tests/acts_contrib/test_utils/wifi/wifi_retail_ap/brcm_ref.py
@@ -0,0 +1,243 @@
+import collections
+import numpy
+import time
+from acts_contrib.test_utils.wifi.wifi_retail_ap import WifiRetailAP
+from acts_contrib.test_utils.wifi.wifi_retail_ap import BlockingBrowser
+
+BROWSER_WAIT_SHORT = 1
+BROWSER_WAIT_MED = 3
+BROWSER_WAIT_LONG = 10
+BROWSER_WAIT_EXTRA_LONG = 60
+
+
+class BrcmRefAP(WifiRetailAP):
+    """Class that implements Netgear RAX200 AP.
+
+    Since most of the class' implementation is shared with the R7000, this
+    class inherits from NetgearR7000AP and simply redefines config parameters
+    """
+    def __init__(self, ap_settings):
+        super().__init__(ap_settings)
+        self.init_gui_data()
+        # Read and update AP settings
+        self.read_ap_settings()
+        self.update_ap_settings(ap_settings)
+
+    def init_gui_data(self):
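+        """Function to initialize data used while interacting with web GUI"""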
+        self.config_page = ('{protocol}://{username}:{password}@'
+                            '{ip_address}:{port}/info.html').format(
+                                protocol=self.ap_settings['protocol'],
+                                username=self.ap_settings['admin_username'],
+                                password=self.ap_settings['admin_password'],
+                                ip_address=self.ap_settings['ip_address'],
+                                port=self.ap_settings['port'])
+        self.config_page_nologin = (
+            '{protocol}://{ip_address}:{port}/'
+            'wlrouter/radio.asp').format(
+                protocol=self.ap_settings['protocol'],
+                ip_address=self.ap_settings['ip_address'],
+                port=self.ap_settings['port'])
+
+        self.capabilities = {
+            'interfaces': ['2G_5G', '6G'],
+            'channels': {
+                '2G_5G': [
+                    1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 36, 40, 44, 48, 52, 56,
+                    60, 64, 100, 104, 108, 112, 116, 120, 124, 128, 132, 136,
+                    140, 144, 149, 153, 157, 161, 165
+                ],
+                '6G': ['6g' + str(ch) for ch in numpy.arange(1, 222, 4)]
+            },
+            'modes': {
+                '2G_5G': [
+                    'VHT20', 'VHT40', 'VHT80', 'VHT160', 'HE20', 'HE40',
+                    'HE80', 'HE160'
+                ],
+                '6G': [
+                    'VHT20', 'VHT40', 'VHT80', 'VHT160', 'HE20', 'HE40',
+                    'HE80', 'HE160'
+                ]
+            },
+            'default_mode': 'HE'
+        }
+        self.ap_settings['region'] = 'United States'
+        for interface in self.capabilities['interfaces']:
+            self.ap_settings[interface] = {
+                'ssid': 'BrcmAP0' if interface == '6G' else 'BrcmAP1',
+                'security_type': 'Open',
+                'password': '1234567890'
+            }
+        self.config_page_fields = collections.OrderedDict({
+            ('2G_5G', 'interface'): ('wl_unit', 1),
+            ('2G_5G', 'band'):
+            'wl_nband',
+            ('2G_5G', 'bandwidth'):
+            'wl_bw_cap',
+            ('2G_5G', 'channel'):
+            'wl_chanspec',
+            ('6G', 'interface'): ('wl_unit', 0),
+            ('6G', 'band'):
+            'wl_nband',
+            ('6G', 'bandwidth'):
+            'wl_bw_cap',
+            ('6G', 'channel'):
+            'wl_chanspec',
+        })
+
+        self.band_mode_values = {'1': '5 GHz', '2': '2.4 GHz', '4': '6 GHz'}
+
+        self.band_values = {'5 GHz': 1, '2.4 GHz': 2, '6 GHz': 4}
+
+        self.bandwidth_mode_values = {
+            '1': 'HE20',
+            '3': 'HE40',
+            '7': 'HE80',
+            '15': 'HE160'
+        }
+
+    def _decode_channel_string(self, channel_string):
+        if channel_string == '0':
+            return 'Auto'
+        if 'u' in channel_string or 'l' in channel_string:
+            channel_string = channel_string[0:-1]
+        elif len(channel_string.split('/')) > 1:
+            channel_string = channel_string.split('/')[0]
+        if '6g' in channel_string:
+            return channel_string
+        else:
+            return int(channel_string)
+
+    def _get_channel_str(self, interface, channel, bandwidth):
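+        # Builds the channel string expected by the GUI dropdown. Illustrative
+        # examples: ch 149 @ 20MHz -> '149', ch 36 @ 80MHz -> '36/80',
+        # ch 36 @ 40MHz -> '36l', ch 40 @ 40MHz -> '40u' (on the 2G_5G radio).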
+        bandwidth = int(''.join([x for x in bandwidth if x.isdigit()]))
+        if bandwidth == 20:
+            channel_str = str(channel)
+        elif bandwidth in [80, 160]:
+            channel_str = str(channel) + '/' + str(bandwidth)
+        elif interface == '6G' and bandwidth == 40:
+            channel_str = str(channel) + '/' + str(bandwidth)
+        elif interface == '2G_5G' and bandwidth == 40:
+            lower_lookup = [
+                36, 44, 52, 60, 100, 108, 116, 124, 132, 140, 149, 157
+            ]
+            if int(channel) in lower_lookup:
+                channel_str = str(channel) + 'l'
+            else:
+                channel_str = str(channel) + 'u'
+        return channel_str
+
+    def read_ap_settings(self):
+        with BlockingBrowser(self.ap_settings['headless_browser'],
+                             900) as browser:
+            # Visit URL
+            browser.visit_persistent(self.config_page, BROWSER_WAIT_MED, 10)
+            browser.visit_persistent(self.config_page_nologin,
+                                     BROWSER_WAIT_MED, 10, self.config_page)
+
+            for key in self.config_page_fields.keys():
+                if 'interface' in key:
+                    config_item = browser.find_by_name(
+                        self.config_page_fields[key][0]).first
+                    config_item.select(self.config_page_fields[key][1])
+                    time.sleep(BROWSER_WAIT_SHORT)
+                else:
+                    config_item = browser.find_by_name(
+                        self.config_page_fields[key]).first
+                    if 'band' in key:
+                        self.ap_settings[key[0]][
+                            key[1]] = self.band_mode_values[config_item.value]
+                    elif 'bandwidth' in key:
+                        self.ap_settings[key[0]][key[
+                            1]] = self.bandwidth_mode_values[config_item.value]
+                    elif 'channel' in key:
+                        self.ap_settings[key[0]][
+                            key[1]] = self._decode_channel_string(
+                                config_item.value)
+                    else:
+                        self.ap_settings[key[0]][key[1]] = config_item.value
+
+    def update_ap_settings(self, dict_settings={}, **named_settings):
+        """Function to update settings of existing AP.
+
+        Function copies arguments into ap_settings and calls configure_ap
+        to apply them.
+
+        Args:
+            dict_settings: single dictionary of settings to update
+            **named_settings: named settings to update
+            Note: dict and named_settings cannot contain the same settings.
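+
+        Illustrative example (setting values are arbitrary):
+            ap.update_ap_settings(
+                {'2G_5G': {'channel': 36, 'bandwidth': 'HE80'}})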
+        """
+
+        settings_to_update = dict(dict_settings, **named_settings)
+        if len(settings_to_update) != len(dict_settings) + len(named_settings):
+            raise KeyError('The following keys were passed twice: {}'.format(
+                (set(dict_settings.keys()).intersection(
+                    set(named_settings.keys())))))
+
+        updating_6G = '6G' in settings_to_update.keys()
+        updating_2G_5G = '2G_5G' in settings_to_update.keys()
+
+        if updating_2G_5G:
+            if 'channel' in settings_to_update['2G_5G']:
+                band = '2.4 GHz' if int(
+                    settings_to_update['2G_5G']['channel']) < 13 else '5 GHz'
+                if band == '2.4 GHz':
+                    settings_to_update['2G_5G']['bandwidth'] = 'HE20'
+                settings_to_update['2G_5G']['band'] = band
+        self.ap_settings, updates_requested, status_toggle_flag = self._update_settings_dict(
+            self.ap_settings, settings_to_update)
+        if updates_requested:
+            self.configure_ap(updating_2G_5G, updating_6G)
+
+    def configure_ap(self, updating_2G_5G, updating_6G):
+
+        with BlockingBrowser(self.ap_settings['headless_browser'],
+                             900) as browser:
+
+            interfaces_to_update = []
+            if updating_2G_5G:
+                interfaces_to_update.append('2G_5G')
+            if updating_6G:
+                interfaces_to_update.append('6G')
+            for interface in interfaces_to_update:
+                # Visit URL
+                browser.visit_persistent(self.config_page, BROWSER_WAIT_MED,
+                                         10)
+                browser.visit_persistent(self.config_page_nologin,
+                                         BROWSER_WAIT_MED, 10,
+                                         self.config_page)
+
+                config_item = browser.find_by_name(
+                    self.config_page_fields[(interface, 'interface')][0]).first
+                config_item.select(self.config_page_fields[(interface,
+                                                            'interface')][1])
+                time.sleep(BROWSER_WAIT_SHORT)
+
+                for key, value in self.config_page_fields.items():
+                    if 'interface' in key or interface not in key:
+                        continue
+                    config_item = browser.find_by_name(
+                        self.config_page_fields[key]).first
+                    if 'band' in key:
+                        config_item.select(
+                            self.band_values[self.ap_settings[key[0]][key[1]]])
+                    elif 'bandwidth' in key:
+                        config_item.select_by_text(
+                            str(self.ap_settings[key[0]][key[1]])[2:] + ' MHz')
+                    elif 'channel' in key:
+                        channel_str = self._get_channel_str(
+                            interface, self.ap_settings[interface][key[1]],
+                            self.ap_settings[interface]['bandwidth'])
+                        config_item.select_by_text(channel_str)
+                    else:
+                        self.ap_settings[key[0]][key[1]] = config_item.value
+                    time.sleep(BROWSER_WAIT_SHORT)
+                # Apply
+                config_item = browser.find_by_name('action')
+                config_item.first.click()
+                time.sleep(BROWSER_WAIT_MED)
+                config_item = browser.find_by_name('action')
+                time.sleep(BROWSER_WAIT_SHORT)
+                config_item.first.click()
+                time.sleep(BROWSER_WAIT_LONG)
+                browser.visit_persistent(self.config_page, BROWSER_WAIT_LONG,
+                                         10)
diff --git a/acts_tests/acts_contrib/test_utils/wifi/wifi_retail_ap/google_wifi.py b/acts_tests/acts_contrib/test_utils/wifi/wifi_retail_ap/google_wifi.py
index dd4ee9f..4023b9a 100644
--- a/acts_tests/acts_contrib/test_utils/wifi/wifi_retail_ap/google_wifi.py
+++ b/acts_tests/acts_contrib/test_utils/wifi/wifi_retail_ap/google_wifi.py
@@ -93,6 +93,14 @@
                 'subnet': '192.168.9.0/24'
             }
         }
+        for setting in self.default_settings.keys():
+            if setting in self.capabilities['interfaces']:
+                continue
+            elif setting not in self.ap_settings:
+                self.log.debug(
+                    '{0} not found during init. Setting {0} = {1}'.format(
+                        setting, self.default_settings[setting]))
+                self.ap_settings[setting] = self.default_settings[setting]
 
         for interface in self.capabilities['interfaces']:
             for setting in self.default_settings[interface].keys():
diff --git a/acts_tests/acts_contrib/test_utils/wifi/wifi_retail_ap/netgear_r7000.py b/acts_tests/acts_contrib/test_utils/wifi/wifi_retail_ap/netgear_r7000.py
index e7f4e83..ac118df 100644
--- a/acts_tests/acts_contrib/test_utils/wifi/wifi_retail_ap/netgear_r7000.py
+++ b/acts_tests/acts_contrib/test_utils/wifi/wifi_retail_ap/netgear_r7000.py
@@ -278,5 +278,5 @@
     """Class that implements Netgear R7000 NA AP."""
     def init_gui_data(self):
         """Function to initialize data used while interacting with web GUI"""
-        super.init_gui_data()
+        super().init_gui_data()
         self.region_map['11'] = 'North America'
diff --git a/acts_tests/acts_contrib/test_utils/wifi/wifi_retail_ap/netgear_rax120.py b/acts_tests/acts_contrib/test_utils/wifi/wifi_retail_ap/netgear_rax120.py
index e718ebd..d1420df 100644
--- a/acts_tests/acts_contrib/test_utils/wifi/wifi_retail_ap/netgear_rax120.py
+++ b/acts_tests/acts_contrib/test_utils/wifi/wifi_retail_ap/netgear_rax120.py
@@ -108,6 +108,12 @@
             '19': 'Russia',
             '20': 'Singapore',
             '21': 'Taiwan',
+            'Australia': 'Australia',
+            'Europe': 'Europe',
+            'Korea': 'Korea',
+            'Singapore': 'Singapore',
+            'Hong Kong': 'Hong Kong',
+            'United States': 'United States',
         }
         self.bw_mode_text = {
             '2G': {
@@ -120,12 +126,12 @@
             '5G_1': {
                 'HE20': 'Up to 1147 Mbps (11ax, HT20, 1024-QAM)',
                 'HE40': 'Up to 2294 Mbps (11ax, HT40, 1024-QAM)',
-                'HE80': 'Up to 4803 Mbps (11ax, HT80, 1024-QAM)',
-                'HE160': 'Up to 4803 Mbps (11ax, HT160, 1024-QAM)',
+                'HE80': 'Up to 4803 Mbps (80MHz) (11ax, HT80, 1024-QAM)',
+                'HE160': 'Up to 4803 Mbps (160MHz) (11ax, HT160, 1024-QAM)',
                 'VHT20': 'Up to 962 Mbps (11ac, HT20, 1024-QAM)',
                 'VHT40': 'Up to 2000 Mbps (11ac, HT40, 1024-QAM)',
-                'VHT80': 'Up to 4333 Mbps (11ac, HT80, 1024-QAM)',
-                'VHT160': 'Up to 4333 Mbps (11ac, HT160, 1024-QAM)'
+                'VHT80': 'Up to 4333 Mbps (80MHz) (11ac, HT80, 1024-QAM)',
+                'VHT160': 'Up to 4333 Mbps (160MHz) (11ac, HT160, 1024-QAM)'
             }
         }
         self.bw_mode_values = {
@@ -146,7 +152,14 @@
                 '7': 'HE20',
                 '8': 'HE40',
                 '9': 'HE80',
-                '10': 'HE160'
+                '10': 'HE160',
+                '54': '11g',
+                '573.5': 'HE20',
+                '1146': 'HE40',
+                '1147': 'HE20',
+                '2294': 'HE40',
+                '4803-HT80': 'HE80',
+                '4803-HT160': 'HE160'
             }
         }
         self.security_mode_values = {
@@ -160,6 +173,55 @@
             }
         }
 
+    def _set_channel_and_bandwidth(self,
+                                   network,
+                                   channel=None,
+                                   bandwidth=None):
+        """Helper function that sets network bandwidth and channel.
+
+        Args:
+            network: string containing network identifier (2G, 5G_1, 5G_2)
+            channel: desired channel
+            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
+        """
+        setting_to_update = {network: {}}
+        if channel:
+            if channel not in self.capabilities['channels'][network]:
+                self.log.error('Ch{} is not supported on {} interface.'.format(
+                    channel, network))
+            setting_to_update[network]['channel'] = channel
+
+        if bandwidth is None:
+            return setting_to_update
+
+        if 'bw' in bandwidth:
+            bandwidth = bandwidth.replace('bw',
+                                          self.capabilities['default_mode'])
+        if bandwidth not in self.capabilities['modes'][network]:
+            self.log.error('{} mode is not supported on {} interface.'.format(
+                bandwidth, network))
+        setting_to_update[network]['bandwidth'] = str(bandwidth)
+        setting_to_update['enable_ax'] = int('HE' in bandwidth)
+        # Check if other interfaces need to be changed too
+        requested_mode = 'HE' if 'HE' in bandwidth else 'VHT'
+        for other_network in self.capabilities['interfaces']:
+            if other_network == network:
+                continue
+            other_mode = 'HE' if 'HE' in self.ap_settings[other_network][
+                'bandwidth'] else 'VHT'
+            other_bw = ''.join([
+                x for x in self.ap_settings[other_network]['bandwidth']
+                if x.isdigit()
+            ])
+            if other_mode != requested_mode:
+                updated_mode = '{}{}'.format(requested_mode, other_bw)
+                self.log.warning('All networks must be VHT or HE. '
+                                 'Updating {} to {}'.format(
+                                     other_network, updated_mode))
+                setting_to_update.setdefault(other_network, {})
+                setting_to_update[other_network]['bandwidth'] = updated_mode
+        return setting_to_update
+
     def set_bandwidth(self, network, bandwidth):
         """Function that sets network bandwidth/mode.
 
@@ -167,31 +229,31 @@
             network: string containing network identifier (2G, 5G_1, 5G_2)
             bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
         """
-        if 'bw' in bandwidth:
-            bandwidth = bandwidth.replace('bw',
-                                          self.capabilities['default_mode'])
-        if bandwidth not in self.capabilities['modes'][network]:
-            self.log.error('{} mode is not supported on {} interface.'.format(
-                bandwidth, network))
-        setting_to_update = {network: {'bandwidth': str(bandwidth)}}
-        setting_to_update['enable_ax'] = int('HE' in bandwidth)
-        # Check if other interfaces need to be changed too
-        requested_mode = 'HE' if 'HE' in bandwidth else 'VHT'
-        other_network = '2G' if '5G_1' in network else '5G_1'
-        other_mode = 'HE' if 'HE' in self.ap_settings[other_network][
-            'bandwidth'] else 'VHT'
-        other_bw = ''.join([
-            x for x in self.ap_settings[other_network]['bandwidth']
-            if x.isdigit()
-        ])
-        if other_mode != requested_mode:
-            updated_mode = '{}{}'.format(requested_mode, other_bw)
-            self.log.warning('All networks must be VHT or HE. '
-                             'Updating {} to {}'.format(
-                                 other_network, updated_mode))
-            setting_to_update.setdefault(other_network, {})
-            setting_to_update[other_network]['bandwidth'] = updated_mode
+        setting_to_update = self._set_channel_and_bandwidth(
+            network, bandwidth=bandwidth)
+        self.update_ap_settings(setting_to_update)
 
+    def set_channel(self, network, channel):
+        """Function that sets network channel.
+
+        Args:
+            network: string containing network identifier (2G, 5G_1, 5G_2)
+            channel: string or int containing channel
+        """
+        setting_to_update = self._set_channel_and_bandwidth(network,
+                                                            channel=channel)
+        self.update_ap_settings(setting_to_update)
+
+    def set_channel_and_bandwidth(self, network, channel, bandwidth):
+        """Function that sets network bandwidth/mode.
+
+        Args:
+            network: string containing network identifier (2G, 5G_1, 5G_2)
+            channel: desired channel
+            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
+        """
+        setting_to_update = self._set_channel_and_bandwidth(
+            network, channel=channel, bandwidth=bandwidth)
         self.update_ap_settings(setting_to_update)
 
     def read_ap_settings(self):
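For reference, a minimal usage sketch (hypothetical 'ap' object and illustrative values, not part of the patch) of the nested settings dict the new _set_channel_and_bandwidth helper builds and how the thin wrappers apply it:

    # Hypothetical sketch; 'ap' stands for an already-initialized retail AP
    # object of this class. The helper only builds the dict; the wrappers
    # pass it to update_ap_settings.
    setting_to_update = ap._set_channel_and_bandwidth(
        '5G_1', channel=36, bandwidth='HE80')
    # Expected shape (other interfaces are only added when they must be
    # switched between VHT and HE to stay consistent):
    # {'5G_1': {'channel': 36, 'bandwidth': 'HE80'}, 'enable_ax': 1}
    ap.update_ap_settings(setting_to_update)
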
@@ -236,9 +298,13 @@
                                 key[1]] = 'defaultpassword'
                             self.ap_settings[
                                 key[0]]['security_type'] = 'Disable'
-                    elif ('channel' in key) or ('ssid' in key):
+                    elif ('ssid' in key):
                         config_item = iframe.find_by_name(value).first
                         self.ap_settings[key[0]][key[1]] = config_item.value
+                    elif ('channel' in key):
+                        config_item = iframe.find_by_name(value).first
+                        self.ap_settings[key[0]][key[1]] = int(
+                            config_item.value)
         return self.ap_settings.copy()
 
     def configure_ap(self, **config_flags):
@@ -269,9 +335,13 @@
                         'enable_ax_chec')
                     action.move_to_element(ax_checkbox).click().perform()
                 # Update AP region. Must be done before channel setting
-                config_item = iframe.find_by_name(
-                    self.config_page_fields['region']).first
-                config_item.select_by_text(self.ap_settings['region'])
+                try:
+                    config_item = iframe.find_by_name(
+                        self.config_page_fields['region']).first
+                    config_item.select_by_text(self.ap_settings['region'])
+                except:
+                    self.log.warning('Could not set AP region to {}.'.format(
+                        self.ap_settings['region']))
                 # Update wireless settings for each network
                 for key, value in self.config_page_fields.items():
                     if 'ssid' in key:
diff --git a/acts_tests/acts_contrib/test_utils/wifi/wifi_retail_ap/netgear_rax200.py b/acts_tests/acts_contrib/test_utils/wifi/wifi_retail_ap/netgear_rax200.py
index 91c6382..d6c6fad 100644
--- a/acts_tests/acts_contrib/test_utils/wifi/wifi_retail_ap/netgear_rax200.py
+++ b/acts_tests/acts_contrib/test_utils/wifi/wifi_retail_ap/netgear_rax200.py
@@ -15,7 +15,6 @@
 #   limitations under the License.
 
 import collections
-import selenium
 import time
 from acts_contrib.test_utils.wifi.wifi_retail_ap import WifiRetailAP
 from acts_contrib.test_utils.wifi.wifi_retail_ap import BlockingBrowser
@@ -119,7 +118,7 @@
                 'VHT20': 'Up to 433 Mbps',
                 'VHT40': 'Up to 1000 Mbps',
                 'VHT80': 'Up to 2165 Mbps',
-                'VHT160': 'Up to 4330'
+                'VHT160': 'Up to 4330 Mbps'
             },
             '5G_2': {
                 'HE20': 'Up to 600 Mbps',
@@ -129,7 +128,7 @@
                 'VHT20': 'Up to 433 Mbps',
                 'VHT40': 'Up to 1000 Mbps',
                 'VHT80': 'Up to 2165 Mbps',
-                'VHT160': 'Up to 4330'
+                'VHT160': 'Up to 4330 Mbps'
             }
         }
         self.bw_mode_values = {
@@ -181,20 +180,34 @@
             '4': '25%'
         }
 
-    def set_bandwidth(self, network, bandwidth):
-        """Function that sets network bandwidth/mode.
+    def _set_channel_and_bandwidth(self,
+                                   network,
+                                   channel=None,
+                                   bandwidth=None):
+        """Helper function that sets network bandwidth and channel.
 
         Args:
             network: string containing network identifier (2G, 5G_1, 5G_2)
+            channel: desired channel
             bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
         """
+        setting_to_update = {network: {}}
+        if channel:
+            if channel not in self.capabilities['channels'][network]:
+                self.log.error('Ch{} is not supported on {} interface.'.format(
+                    channel, network))
+            setting_to_update[network]['channel'] = channel
+
+        if bandwidth is None:
+            return setting_to_update
+
         if 'bw' in bandwidth:
             bandwidth = bandwidth.replace('bw',
                                           self.capabilities['default_mode'])
         if bandwidth not in self.capabilities['modes'][network]:
             self.log.error('{} mode is not supported on {} interface.'.format(
                 bandwidth, network))
-        setting_to_update = {network: {'bandwidth': str(bandwidth)}}
+        setting_to_update[network]['bandwidth'] = str(bandwidth)
         setting_to_update['enable_ax'] = int('HE' in bandwidth)
         # Check if other interfaces need to be changed too
         requested_mode = 'HE' if 'HE' in bandwidth else 'VHT'
@@ -214,7 +227,41 @@
                                      other_network, updated_mode))
                 setting_to_update.setdefault(other_network, {})
                 setting_to_update[other_network]['bandwidth'] = updated_mode
+        return setting_to_update
 
+    def set_bandwidth(self, network, bandwidth):
+        """Function that sets network bandwidth/mode.
+
+        Args:
+            network: string containing network identifier (2G, 5G_1, 5G_2)
+            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
+        """
+
+        setting_to_update = self._set_channel_and_bandwidth(
+            network, bandwidth=bandwidth)
+        self.update_ap_settings(setting_to_update)
+
+    def set_channel(self, network, channel):
+        """Function that sets network channel.
+
+        Args:
+            network: string containing network identifier (2G, 5G_1, 5G_2)
+            channel: string or int containing channel
+        """
+        setting_to_update = self._set_channel_and_bandwidth(network,
+                                                            channel=channel)
+        self.update_ap_settings(setting_to_update)
+
+    def set_channel_and_bandwidth(self, network, channel, bandwidth):
+        """Function that sets network bandwidth/mode.
+
+        Args:
+            network: string containing network identifier (2G, 5G_1, 5G_2)
+            channel: desired channel
+            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
+        """
+        setting_to_update = self._set_channel_and_bandwidth(
+            network, channel=channel, bandwidth=bandwidth)
         self.update_ap_settings(setting_to_update)
 
     def read_ap_settings(self):
@@ -252,6 +299,10 @@
                         for item in config_item:
                             if item.checked:
                                 self.ap_settings[key[0]][key[1]] = item.value
+                    elif 'channel' in key:
+                        config_item = browser.find_by_name(value)
+                        self.ap_settings[key[0]][key[1]] = int(
+                            config_item.first.value)
                     else:
                         config_item = browser.find_by_name(value)
                         self.ap_settings[key[0]][
@@ -321,8 +372,10 @@
                         self.log.warning(
                             'Cannot select channel. Keeping AP default.')
                     try:
-                        alert = browser.get_alert()
-                        alert.accept()
+                        for idx in range(0, 2):
+                            alert = browser.get_alert()
+                            alert.accept()
+                            time.sleep(BROWSER_WAIT_SHORT)
                     except:
                         pass
             time.sleep(BROWSER_WAIT_SHORT)
@@ -336,7 +389,6 @@
                 time.sleep(BROWSER_WAIT_SHORT)
             browser.visit_persistent(self.config_page, BROWSER_WAIT_EXTRA_LONG,
                                      10)
-        self.validate_ap_settings()
 
     def configure_radio_on_off(self):
         """Helper configuration function to turn radios on/off."""
diff --git a/acts_tests/acts_contrib/test_utils/wifi/wifi_retail_ap/netgear_raxe500.py b/acts_tests/acts_contrib/test_utils/wifi/wifi_retail_ap/netgear_raxe500.py
new file mode 100644
index 0000000..9dc60aa
--- /dev/null
+++ b/acts_tests/acts_contrib/test_utils/wifi/wifi_retail_ap/netgear_raxe500.py
@@ -0,0 +1,442 @@
+#!/usr/bin/env python3
+#
+#   Copyright 2020 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the 'License');
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an 'AS IS' BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+import collections
+import numpy
+import re
+import time
+from acts_contrib.test_utils.wifi.wifi_retail_ap import WifiRetailAP
+from acts_contrib.test_utils.wifi.wifi_retail_ap import BlockingBrowser
+
+BROWSER_WAIT_SHORT = 1
+BROWSER_WAIT_MED = 3
+BROWSER_WAIT_LONG = 30
+BROWSER_WAIT_EXTRA_LONG = 60
+
+
+class NetgearRAXE500AP(WifiRetailAP):
+    """Class that implements Netgear RAXE500 AP.
+
+    This tri-band (2.4/5/6 GHz) AP class inherits from WifiRetailAP and
+    defines the GUI config pages, capabilities, and field mappings specific
+    to the RAXE500.
+    """
+    def __init__(self, ap_settings):
+        super().__init__(ap_settings)
+        self.init_gui_data()
+        # Read and update AP settings
+        self.read_ap_firmware()
+        self.read_ap_settings()
+        self.update_ap_settings(ap_settings)
+
+    def init_gui_data(self):
+        self.config_page = (
+            '{protocol}://{username}:{password}@'
+            '{ip_address}:{port}/WLG_wireless_tri_band.htm').format(
+                protocol=self.ap_settings['protocol'],
+                username=self.ap_settings['admin_username'],
+                password=self.ap_settings['admin_password'],
+                ip_address=self.ap_settings['ip_address'],
+                port=self.ap_settings['port'])
+        self.config_page_nologin = (
+            '{protocol}://{ip_address}:{port}/'
+            'WLG_wireless_tri_band.htm').format(
+                protocol=self.ap_settings['protocol'],
+                ip_address=self.ap_settings['ip_address'],
+                port=self.ap_settings['port'])
+        self.config_page_advanced = (
+            '{protocol}://{username}:{password}@'
+            '{ip_address}:{port}/WLG_adv_tri_band2.htm').format(
+                protocol=self.ap_settings['protocol'],
+                username=self.ap_settings['admin_username'],
+                password=self.ap_settings['admin_password'],
+                ip_address=self.ap_settings['ip_address'],
+                port=self.ap_settings['port'])
+        self.firmware_page = (
+            '{protocol}://{username}:{password}@'
+            '{ip_address}:{port}/ADVANCED_home2_tri_band.htm').format(
+                protocol=self.ap_settings['protocol'],
+                username=self.ap_settings['admin_username'],
+                password=self.ap_settings['admin_password'],
+                ip_address=self.ap_settings['ip_address'],
+                port=self.ap_settings['port'])
+        self.capabilities = {
+            'interfaces': ['2G', '5G_1', '6G'],
+            'channels': {
+                '2G': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
+                '5G_1': [
+                    36, 40, 44, 48, 52, 56, 60, 64, 100, 104, 108, 112, 116,
+                    120, 124, 128, 132, 136, 140, 144, 149, 153, 157, 161, 165
+                ],
+                '6G': ['6g' + str(ch) for ch in numpy.arange(37, 222, 16)]
+            },
+            'modes': {
+                '2G': ['VHT20', 'VHT40', 'HE20', 'HE40'],
+                '5G_1': [
+                    'VHT20', 'VHT40', 'VHT80', 'VHT160', 'HE20', 'HE40',
+                    'HE80', 'HE160'
+                ],
+                '6G': [
+                    'VHT20', 'VHT40', 'VHT80', 'VHT160', 'HE20', 'HE40',
+                    'HE80', 'HE160'
+                ]
+            },
+            'default_mode': 'HE'
+        }
+        for interface in self.capabilities['interfaces']:
+            self.ap_settings[interface] = {}
+
+        self.region_map = {
+            '3': 'Australia',
+            '4': 'Canada',
+            '5': 'Europe',
+            '7': 'Japan',
+            '8': 'Korea',
+            '11': 'North America',
+            '16': 'China',
+            '17': 'India',
+            '21': 'Middle East(Saudi Arabia/United Arab Emirates)',
+            '23': 'Singapore',
+            '25': 'Hong Kong',
+            '26': 'Vietnam'
+        }
+
+        self.bw_mode_text = {
+            '2G': {
+                'g and b': 'Up to 54 Mbps',
+                'HE20': 'Up to 600 Mbps',
+                'HE40': 'Up to 1200 Mbps',
+                'VHT20': 'Up to 433 Mbps',
+                'VHT40': 'Up to 1000 Mbps'
+            },
+            '5G_1': {
+                'HE20': 'Up to 600 Mbps',
+                'HE40': 'Up to 1200 Mbps',
+                'HE80': 'Up to 2400 Mbps',
+                'HE160': 'Up to 4800 Mbps',
+                'VHT20': 'Up to 433 Mbps',
+                'VHT40': 'Up to 1000 Mbps',
+                'VHT80': 'Up to 2165 Mbps',
+                'VHT160': 'Up to 4330 Mbps'
+            },
+            '6G': {
+                'HE20': 'Up to 600 Mbps',
+                'HE40': 'Up to 1200 Mbps',
+                'HE80': 'Up to 2400 Mbps',
+                'HE160': 'Up to 4800 Mbps',
+                'VHT20': 'Up to 600 Mbps',
+                'VHT40': 'Up to 1200 Mbps',
+                'VHT80': 'Up to 2400 Mbps',
+                'VHT160': 'Up to 4800 Mbps'
+            }
+        }
+        self.bw_mode_values = {
+            # first key is a boolean indicating if 11ax is enabled
+            0: {
+                'g and b': '11g',
+                'HT20': 'VHT20',
+                'HT40': 'VHT40',
+                'HT80': 'VHT80',
+                'HT160': 'VHT160'
+            },
+            1: {
+                'g and b': '11g',
+                'HT20': 'HE20',
+                'HT40': 'HE40',
+                'HT80': 'HE80',
+                'HT160': 'HE160'
+            }
+        }
+
+        # Config ordering intentional to avoid GUI bugs
+        self.config_page_fields = collections.OrderedDict([
+            ('region', 'WRegion'), ('enable_ax', 'enable_he'),
+            (('2G', 'status'), 'enable_ap'),
+            (('5G_1', 'status'), 'enable_ap_an'),
+            (('6G', 'status'), 'enable_ap_an_2'), (('2G', 'ssid'), 'ssid'),
+            (('5G_1', 'ssid'), 'ssid_an'), (('6G', 'ssid'), 'ssid_an_2'),
+            (('2G', 'channel'), 'w_channel'),
+            (('5G_1', 'channel'), 'w_channel_an'),
+            (('6G', 'channel'), 'w_channel_an_2'),
+            (('2G', 'bandwidth'), 'opmode'),
+            (('5G_1', 'bandwidth'), 'opmode_an'),
+            (('6G', 'bandwidth'), 'opmode_an_2'),
+            (('2G', 'power'), 'enable_tpc'),
+            (('5G_1', 'power'), 'enable_tpc_an'),
+            (('6G', 'security_type'), 'security_type_an_2'),
+            (('5G_1', 'security_type'), 'security_type_an'),
+            (('2G', 'security_type'), 'security_type'),
+            (('2G', 'password'), 'passphrase'),
+            (('5G_1', 'password'), 'passphrase_an'),
+            (('6G', 'password'), 'passphrase_an_2')
+        ])
+
+        self.power_mode_values = {
+            '1': '100%',
+            '2': '75%',
+            '3': '50%',
+            '4': '25%'
+        }
+
+    def _set_channel_and_bandwidth(self,
+                                   network,
+                                   channel=None,
+                                   bandwidth=None):
+        """Helper function that sets network bandwidth and channel.
+
+        Args:
+            network: string containing network identifier (2G, 5G_1, 6G)
+            channel: desired channel
+            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
+        """
+
+        setting_to_update = {network: {}}
+        if channel:
+            if channel not in self.capabilities['channels'][network]:
+                self.log.error('Ch{} is not supported on {} interface.'.format(
+                    channel, network))
+            if isinstance(channel, str) and '6g' in channel:
+                channel = int(channel[2:])
+            setting_to_update[network]['channel'] = channel
+
+        if bandwidth is None:
+            return setting_to_update
+
+        if 'bw' in bandwidth:
+            bandwidth = bandwidth.replace('bw',
+                                          self.capabilities['default_mode'])
+        if bandwidth not in self.capabilities['modes'][network]:
+            self.log.error('{} mode is not supported on {} interface.'.format(
+                bandwidth, network))
+        setting_to_update[network]['bandwidth'] = str(bandwidth)
+        setting_to_update['enable_ax'] = int('HE' in bandwidth)
+        # Check if other interfaces need to be changed too
+        requested_mode = 'HE' if 'HE' in bandwidth else 'VHT'
+        for other_network in self.capabilities['interfaces']:
+            if other_network == network:
+                continue
+            other_mode = 'HE' if 'HE' in self.ap_settings[other_network][
+                'bandwidth'] else 'VHT'
+            other_bw = ''.join([
+                x for x in self.ap_settings[other_network]['bandwidth']
+                if x.isdigit()
+            ])
+            if other_mode != requested_mode:
+                updated_mode = '{}{}'.format(requested_mode, other_bw)
+                self.log.warning('All networks must be VHT or HE. '
+                                 'Updating {} to {}'.format(
+                                     other_network, updated_mode))
+                setting_to_update.setdefault(other_network, {})
+                setting_to_update[other_network]['bandwidth'] = updated_mode
+        return setting_to_update
+
+    def set_bandwidth(self, network, bandwidth):
+        """Function that sets network bandwidth/mode.
+
+        Args:
+            network: string containing network identifier (2G, 5G_1, 6G)
+            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
+        """
+
+        setting_to_update = self._set_channel_and_bandwidth(
+            network, bandwidth=bandwidth)
+        self.update_ap_settings(setting_to_update)
+
+    def set_channel(self, network, channel):
+        """Function that sets network channel.
+
+        Args:
+            network: string containing network identifier (2G, 5G_1, 6G)
+            channel: string or int containing channel
+        """
+        setting_to_update = self._set_channel_and_bandwidth(network,
+                                                            channel=channel)
+        self.update_ap_settings(setting_to_update)
+
+    def set_channel_and_bandwidth(self, network, channel, bandwidth):
+        """Function that sets network bandwidth/mode.
+
+        Args:
+            network: string containing network identifier (2G, 5G_1, 6G)
+            channel: desired channel
+            bandwidth: string containing mode, e.g. 11g, VHT20, VHT40, VHT80.
+        """
+        setting_to_update = self._set_channel_and_bandwidth(
+            network, channel=channel, bandwidth=bandwidth)
+        self.update_ap_settings(setting_to_update)
+
+    def read_ap_firmware(self):
+        """Function to read ap settings."""
+        with BlockingBrowser(self.ap_settings['headless_browser'],
+                             900) as browser:
+
+            # Visit URL
+            browser.visit_persistent(self.firmware_page, BROWSER_WAIT_MED, 10)
+            firmware_regex = re.compile(
+                r'Firmware Version[\s\S]+V(?P<version>[0-9._]+)')
+            firmware_version = re.search(firmware_regex, browser.html)
+            if firmware_version:
+                self.ap_settings['firmware_version'] = firmware_version.group(
+                    'version')
+            else:
+                self.ap_settings['firmware_version'] = -1
+
+    def read_ap_settings(self):
+        """Function to read ap settings."""
+        with BlockingBrowser(self.ap_settings['headless_browser'],
+                             900) as browser:
+            # Visit URL
+            browser.visit_persistent(self.config_page, BROWSER_WAIT_MED, 10)
+
+            for key, value in self.config_page_fields.items():
+                if 'status' in key:
+                    browser.visit_persistent(self.config_page_advanced,
+                                             BROWSER_WAIT_MED, 10)
+                    config_item = browser.find_by_name(value)
+                    self.ap_settings[key[0]][key[1]] = int(
+                        config_item.first.checked)
+                    browser.visit_persistent(self.config_page,
+                                             BROWSER_WAIT_MED, 10)
+                else:
+                    config_item = browser.find_by_name(value)
+                    if 'enable_ax' in key:
+                        self.ap_settings[key] = int(config_item.first.checked)
+                    elif 'bandwidth' in key:
+                        self.ap_settings[key[0]][key[1]] = self.bw_mode_values[
+                            self.ap_settings['enable_ax']][
+                                config_item.first.value]
+                    elif 'power' in key:
+                        self.ap_settings[key[0]][
+                            key[1]] = self.power_mode_values[
+                                config_item.first.value]
+                    elif 'region' in key:
+                        self.ap_settings['region'] = self.region_map[
+                            config_item.first.value]
+                    elif 'security_type' in key:
+                        for item in config_item:
+                            if item.checked:
+                                self.ap_settings[key[0]][key[1]] = item.value
+                    elif 'channel' in key:
+                        config_item = browser.find_by_name(value)
+                        self.ap_settings[key[0]][key[1]] = int(
+                            config_item.first.value)
+                    else:
+                        config_item = browser.find_by_name(value)
+                        self.ap_settings[key[0]][
+                            key[1]] = config_item.first.value
+        return self.ap_settings.copy()
+
+    def configure_ap(self, **config_flags):
+        """Function to configure ap wireless settings."""
+        # Turn radios on or off
+        if config_flags['status_toggled']:
+            self.configure_radio_on_off()
+        # Configure radios
+        with BlockingBrowser(self.ap_settings['headless_browser'],
+                             900) as browser:
+            # Visit URL
+            browser.visit_persistent(self.config_page, BROWSER_WAIT_MED, 10)
+            browser.visit_persistent(self.config_page_nologin,
+                                     BROWSER_WAIT_MED, 10, self.config_page)
+
+            # Update region, and power/bandwidth for each network
+            try:
+                config_item = browser.find_by_name(
+                    self.config_page_fields['region']).first
+                config_item.select_by_text(self.ap_settings['region'])
+            except:
+                self.log.warning('Cannot change region.')
+            for key, value in self.config_page_fields.items():
+                if 'enable_ax' in key:
+                    config_item = browser.find_by_name(value).first
+                    if self.ap_settings['enable_ax']:
+                        config_item.check()
+                    else:
+                        config_item.uncheck()
+                if 'power' in key:
+                    config_item = browser.find_by_name(value).first
+                    config_item.select_by_text(
+                        self.ap_settings[key[0]][key[1]])
+                elif 'bandwidth' in key:
+                    config_item = browser.find_by_name(value).first
+                    try:
+                        config_item.select_by_text(self.bw_mode_text[key[0]][
+                            self.ap_settings[key[0]][key[1]]])
+                    except AttributeError:
+                        self.log.warning(
+                            'Cannot select bandwidth. Keeping AP default.')
+
+            # Update security settings (passwords updated only if applicable)
+            for key, value in self.config_page_fields.items():
+                if 'security_type' in key:
+                    browser.choose(value, self.ap_settings[key[0]][key[1]])
+                    if 'WPA' in self.ap_settings[key[0]][key[1]]:
+                        config_item = browser.find_by_name(
+                            self.config_page_fields[(key[0],
+                                                     'password')]).first
+                        config_item.fill(self.ap_settings[key[0]]['password'])
+
+            for key, value in self.config_page_fields.items():
+                if 'ssid' in key:
+                    config_item = browser.find_by_name(value).first
+                    config_item.fill(self.ap_settings[key[0]][key[1]])
+                elif 'channel' in key:
+                    config_item = browser.find_by_name(value).first
+                    try:
+                        config_item.select(self.ap_settings[key[0]][key[1]])
+                        time.sleep(BROWSER_WAIT_SHORT)
+                    except AttributeError:
+                        self.log.warning(
+                            'Cannot select channel. Keeping AP default.')
+                    try:
+                        alert = browser.get_alert()
+                        alert.accept()
+                    except:
+                        pass
+            time.sleep(BROWSER_WAIT_SHORT)
+            browser.find_by_name('Apply').first.click()
+            time.sleep(BROWSER_WAIT_SHORT)
+            try:
+                alert = browser.get_alert()
+                alert.accept()
+                time.sleep(BROWSER_WAIT_SHORT)
+            except:
+                time.sleep(BROWSER_WAIT_SHORT)
+            browser.visit_persistent(self.config_page, BROWSER_WAIT_EXTRA_LONG,
+                                     10)
+
+    def configure_radio_on_off(self):
+        """Helper configuration function to turn radios on/off."""
+        with BlockingBrowser(self.ap_settings['headless_browser'],
+                             900) as browser:
+            # Visit URL
+            browser.visit_persistent(self.config_page, BROWSER_WAIT_MED, 10)
+            browser.visit_persistent(self.config_page_advanced,
+                                     BROWSER_WAIT_MED, 10)
+
+            # Turn radios on or off
+            for key, value in self.config_page_fields.items():
+                if 'status' in key:
+                    config_item = browser.find_by_name(value).first
+                    if self.ap_settings[key[0]][key[1]]:
+                        config_item.check()
+                    else:
+                        config_item.uncheck()
+
+            time.sleep(BROWSER_WAIT_SHORT)
+            browser.find_by_name('Apply').first.click()
+            time.sleep(BROWSER_WAIT_EXTRA_LONG)
+            browser.visit_persistent(self.config_page, BROWSER_WAIT_EXTRA_LONG,
+                                     10)
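As a side note on the 6 GHz handling in this new class, a brief standalone sketch (illustrative only, not part of the patch) of how the '6G' channel labels are generated and later normalized before being written to the GUI:

    import numpy

    # capabilities['channels']['6G'] uses '6g<channel>' labels:
    labels = ['6g' + str(ch) for ch in numpy.arange(37, 222, 16)]
    # -> ['6g37', '6g53', ..., '6g213']

    # _set_channel_and_bandwidth strips the '6g' prefix to a plain int:
    channel = labels[0]
    if isinstance(channel, str) and '6g' in channel:
        channel = int(channel[2:])  # -> 37
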
diff --git a/acts_tests/acts_contrib/test_utils/wifi/wifi_test_utils.py b/acts_tests/acts_contrib/test_utils/wifi/wifi_test_utils.py
index 1729a7c..c29ce5c 100755
--- a/acts_tests/acts_contrib/test_utils/wifi/wifi_test_utils.py
+++ b/acts_tests/acts_contrib/test_utils/wifi/wifi_test_utils.py
@@ -439,6 +439,14 @@
         165: 5825
     }
 
+    channel_6G_to_freq = {4 * x + 1: 5955 + 20 * x for x in range(59)}
+
+    channel_to_freq = {
+        '2G': channel_2G_to_freq,
+        '5G': channel_5G_to_freq,
+        '6G': channel_6G_to_freq
+    }
+
 
 class WifiChannelBase:
     ALL_2G_FREQUENCIES = []
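A quick numeric check of the new 6 GHz channel-to-frequency map (illustrative only): channel numbers advance by 4 while center frequencies advance by 20 MHz, starting at channel 1 / 5955 MHz.

    channel_6G_to_freq = {4 * x + 1: 5955 + 20 * x for x in range(59)}
    assert channel_6G_to_freq[1] == 5955     # x = 0
    assert channel_6G_to_freq[37] == 6135    # x = 9
    assert channel_6G_to_freq[233] == 7115   # x = 58, last entry
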
@@ -1713,7 +1721,8 @@
     # Need a delay here because UI interaction should only start once wifi
     # starts processing the request.
     time.sleep(wifi_constants.NETWORK_REQUEST_CB_REGISTER_DELAY_SEC)
-    _wait_for_wifi_connect_after_network_request(ad, network, key, num_of_tries)
+    _wait_for_wifi_connect_after_network_request(ad, network, key,
+                                                 num_of_tries)
     return key
 
 
@@ -1749,7 +1758,10 @@
                             assert_on_fail, ad, network, key, num_of_tries)
 
 
-def _wait_for_wifi_connect_after_network_request(ad, network, key, num_of_tries=3):
+def _wait_for_wifi_connect_after_network_request(ad,
+                                                 network,
+                                                 key,
+                                                 num_of_tries=3):
     """
     Simulate and verify the connection flow after initiating the network
     request.
@@ -1801,13 +1813,11 @@
 
         # Wait for the platform to connect to the network.
         autils.wait_for_event_with_keys(
-            ad, cconsts.EVENT_NETWORK_CALLBACK,
-            60,
+            ad, cconsts.EVENT_NETWORK_CALLBACK, 60,
             (cconsts.NETWORK_CB_KEY_ID, key),
             (cconsts.NETWORK_CB_KEY_EVENT, cconsts.NETWORK_CB_AVAILABLE))
         on_capabilities_changed = autils.wait_for_event_with_keys(
-            ad, cconsts.EVENT_NETWORK_CALLBACK,
-            10,
+            ad, cconsts.EVENT_NETWORK_CALLBACK, 10,
             (cconsts.NETWORK_CB_KEY_ID, key),
             (cconsts.NETWORK_CB_KEY_EVENT,
              cconsts.NETWORK_CB_CAPABILITIES_CHANGED))
@@ -1824,8 +1834,7 @@
         asserts.assert_equal(
             connected_network[WifiEnums.SSID_KEY], expected_ssid,
             "Connected to the wrong network."
-            "Expected %s, but got %s."
-            % (network, connected_network))
+            "Expected %s, but got %s." % (network, connected_network))
     except Empty:
         asserts.fail("Failed to connect to %s" % expected_ssid)
     except Exception as error:
diff --git a/acts_tests/setup.py b/acts_tests/setup.py
index 6c0a4a6..b2c3151 100755
--- a/acts_tests/setup.py
+++ b/acts_tests/setup.py
@@ -45,7 +45,6 @@
     # Python 3.8+ is supported by the latest bokeh
     install_requires.append('bokeh')
 
-
 def _setup_acts_framework(option, *args):
     """Locates and runs setup.py for the ACTS framework.
 
diff --git a/acts_tests/tests/google/ble/performance/BleRangeTest.py b/acts_tests/tests/google/ble/performance/BleRangeTest.py
new file mode 100644
index 0000000..3473cef
--- /dev/null
+++ b/acts_tests/tests/google/ble/performance/BleRangeTest.py
@@ -0,0 +1,289 @@
+#!/usr/bin/env python3
+#
+# Copyright 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Test script to establish a BLE connection, run data traffic, and calculate the
+RSSI of the remote BLE device.
+"""
+
+import os
+import logging
+import pandas as pd
+import numpy as np
+import time
+import acts_contrib.test_utils.bt.bt_test_utils as btutils
+import acts_contrib.test_utils.wifi.wifi_performance_test_utils.bokeh_figure as bokeh_figure
+from acts_contrib.test_utils.bt.ble_performance_test_utils import ble_coc_connection
+from acts_contrib.test_utils.bt.ble_performance_test_utils import ble_gatt_disconnection
+from acts_contrib.test_utils.bt.ble_performance_test_utils import start_advertising_and_scanning
+from acts_contrib.test_utils.bt.BluetoothBaseTest import BluetoothBaseTest
+from acts_contrib.test_utils.bt.bt_test_utils import cleanup_scanners_and_advertisers
+from acts_contrib.test_utils.bt.ble_performance_test_utils import establish_ble_connection
+from acts_contrib.test_utils.bt.bt_constants import l2cap_max_inactivity_delay_after_disconnect
+from acts_contrib.test_utils.bt.ble_performance_test_utils import run_ble_throughput
+from acts_contrib.test_utils.bt.ble_performance_test_utils import read_ble_rssi
+from acts_contrib.test_utils.bt.ble_performance_test_utils import read_ble_scan_rssi
+from acts_contrib.test_utils.bt.bt_test_utils import reset_bluetooth
+from acts_contrib.test_utils.power.PowerBTBaseTest import ramp_attenuation
+from acts_contrib.test_utils.bt.bt_test_utils import setup_multiple_devices_for_bt_test
+from acts.signals import TestPass
+from acts import utils
+
+INIT_ATTEN = 0
+MAX_RSSI = 92
+
+
+class BleRangeTest(BluetoothBaseTest):
+    active_adv_callback_list = []
+    active_scan_callback_list = []
+
+    def __init__(self, configs):
+        super().__init__(configs)
+        req_params = ['attenuation_vector', 'system_path_loss']
+        #'attenuation_vector' is a dict containing: start, stop and step of
+        #attenuation changes
+        self.unpack_userparams(req_params)
+
+    def setup_class(self):
+        super().setup_class()
+        self.client_ad = self.android_devices[0]
+        # The scanning client needs location enabled in order to start scans
+        # and get scan results.
+        utils.set_location_service(self.client_ad, True)
+        self.server_ad = self.android_devices[1]
+        # Note that some tests require a third device.
+        if hasattr(self, 'attenuators'):
+            self.attenuator = self.attenuators[0]
+            self.attenuator.set_atten(INIT_ATTEN)
+        self.attenuation_range = range(self.attenuation_vector['start'],
+                                       self.attenuation_vector['stop'] + 1,
+                                       self.attenuation_vector['step'])
+        self.log_path = os.path.join(logging.log_path, 'results')
+        os.makedirs(self.log_path, exist_ok=True)
+        # BokehFigure object
+        self.plot = bokeh_figure.BokehFigure(
+            title='{}'.format(self.current_test_name),
+            x_label='Pathloss (dB)',
+            primary_y_label='BLE RSSI (dBm)',
+            secondary_y_label='DUT Tx Power (dBm)',
+            axis_label_size='16pt')
+        if len(self.android_devices) > 2:
+            self.server2_ad = self.android_devices[2]
+
+        btutils.enable_bqr(self.android_devices)
+        return setup_multiple_devices_for_bt_test(self.android_devices)
+
+    def teardown_test(self):
+        self.client_ad.droid.bluetoothSocketConnStop()
+        self.server_ad.droid.bluetoothSocketConnStop()
+        if hasattr(self, 'attenuator'):
+            self.attenuator.set_atten(INIT_ATTEN)
+        # Give sufficient time for the physical LE link to be disconnected.
+        time.sleep(l2cap_max_inactivity_delay_after_disconnect)
+        cleanup_scanners_and_advertisers(self.client_ad,
+                                         self.active_scan_callback_list,
+                                         self.server_ad,
+                                         self.active_adv_callback_list)
+
+    def test_ble_gatt_connection_range(self):
+        """Test GATT connection over LE and read RSSI.
+
+        Test will establish a GATT connection between a GATT server and GATT
+        client, then read the RSSI at each attenuation step until the BLE
+        link disconnects.
+
+        Expected Result:
+        Verify that a connection was established and then disconnected
+        successfully. Verify that the RSSI was read correctly.
+
+        """
+        attenuation = []
+        ble_rssi = []
+        dut_pwlv = []
+        path_loss = []
+        bluetooth_gatt, gatt_callback, adv_callback, gatt_server = establish_ble_connection(
+            self.client_ad, self.server_ad)
+        for atten in self.attenuation_range:
+            ramp_attenuation(self.attenuator, atten)
+            self.log.info('Set attenuation to %d dB', atten)
+            rssi_primary, pwlv_primary = self.get_ble_rssi_and_pwlv()
+            self.log.info(
+                "Dut BLE RSSI:{} and Pwlv:{} with attenuation:{}".format(
+                    rssi_primary, pwlv_primary, atten))
+            rssi = self.client_ad.droid.gattClientReadRSSI(gatt_server)
+            if type(rssi_primary) != str:
+                attenuation.append(atten)
+                ble_rssi.append(rssi_primary)
+                dut_pwlv.append(pwlv_primary)
+                path_loss.append(atten + self.system_path_loss)
+                df = pd.DataFrame({
+                    'Attenuation': attenuation,
+                    'BLE_RSSI': ble_rssi,
+                    'Dut_PwLv': dut_pwlv,
+                    'Pathloss': path_loss
+                })
+                filepath = os.path.join(
+                    self.log_path, '{}.csv'.format(self.current_test_name))
+            else:
+                self.plot_ble_graph(df)
+                df.to_csv(filepath, encoding='utf-8')
+                raise TestPass('Reached BLE Max Range, BLE Gatt disconnected')
+        ble_gatt_disconnection(self.client_ad, bluetooth_gatt, gatt_callback)
+        self.plot_ble_graph(df)
+        df.to_csv(filepath, encoding='utf-8')
+        self.server_ad.droid.bleStopBleAdvertising(adv_callback)
+        return True
+
+    def test_ble_coc_throughput_range(self):
+        """Test LE CoC data transfer and read RSSI with each attenuation
+
+        Test will establish an L2CAP CoC connection between client and server,
+        then start BLE data transfer and read the RSSI at each attenuation
+        step until the BLE link disconnects.
+
+        Expected Result:
+        BLE data transfer succeeds and the RSSI of the server is read.
+
+        """
+        attenuation = []
+        ble_rssi = []
+        throughput = []
+        dut_pwlv = []
+        path_loss = []
+        self.plot_throughput = bokeh_figure.BokehFigure(
+            title='{}'.format(self.current_test_name),
+            x_label='Pathloss (dB)',
+            primary_y_label='BLE Throughput (bits per sec)',
+            axis_label_size='16pt')
+        status, gatt_callback, gatt_server, bluetooth_gatt, client_conn_id = ble_coc_connection(
+            self.server_ad, self.client_ad)
+        for atten in self.attenuation_range:
+            ramp_attenuation(self.attenuator, atten)
+            self.log.info('Set attenuation to %d dB', atten)
+            datarate = run_ble_throughput(self.client_ad, client_conn_id,
+                                          self.server_ad)
+            rssi_primary, pwlv_primary = self.get_ble_rssi_and_pwlv()
+            self.log.info(
+                "BLE RSSI is:{} dBm and Tx Power:{} with attenuation {} dB with throughput:{}bits per sec"
+                .format(rssi_primary, pwlv_primary, atten, datarate))
+            if type(rssi_primary) != str:
+                attenuation.append(atten)
+                ble_rssi.append(rssi_primary)
+                dut_pwlv.append(pwlv_primary)
+                throughput.append(datarate)
+                path_loss.append(atten + self.system_path_loss)
+                df = pd.DataFrame({
+                    'Attenuation': attenuation,
+                    'BLE_RSSI': ble_rssi,
+                    'Dut_PwLv': dut_pwlv,
+                    'Throughput': throughput,
+                    'Pathloss': path_loss
+                })
+                filepath = os.path.join(
+                    self.log_path, '{}.csv'.format(self.current_test_name))
+                results_file_path = os.path.join(
+                    self.log_path,
+                    '{}_throughput.html'.format(self.current_test_name))
+                self.plot_throughput.add_line(df['Pathloss'],
+                                              df['Throughput'],
+                                              legend='BLE Throughput',
+                                              marker='square_x')
+            else:
+                self.plot_ble_graph(df)
+                self.plot_throughput.generate_figure()
+                bokeh_figure.BokehFigure.save_figures([self.plot_throughput],
+                                                      results_file_path)
+                df.to_csv(filepath, encoding='utf-8')
+                raise TestPass('Reached BLE Max Range, BLE Gatt disconnected')
+        self.plot_ble_graph(df)
+        self.plot_throughput.generate_figure()
+        bokeh_figure.BokehFigure.save_figures([self.plot_throughput],
+                                              results_file_path)
+        df.to_csv(filepath, encoding='utf-8')
+        ble_gatt_disconnection(self.server_ad, bluetooth_gatt, gatt_callback)
+        return True
+
+    def test_ble_scan_remote_rssi(self):
+        data_points = []
+        for atten in self.attenuation_range:
+            csv_path = os.path.join(
+                self.log_path,
+                '{}_attenuation_{}.csv'.format(self.current_test_name, atten))
+            ramp_attenuation(self.attenuator, atten)
+            self.log.info('Set attenuation to %d dB', atten)
+            adv_callback, scan_callback = start_advertising_and_scanning(
+                self.client_ad, self.server_ad, Legacymode=False)
+            self.active_adv_callback_list.append(adv_callback)
+            self.active_scan_callback_list.append(scan_callback)
+            average_rssi, raw_rssi, timestamp = read_ble_scan_rssi(
+                self.client_ad, scan_callback)
+            self.log.info(
+                "Scanned rssi list of the remote device is :{}".format(
+                    raw_rssi))
+            self.log.info(
+                "BLE RSSI of the remote device is:{} dBm".format(average_rssi))
+            min_rssi = min(raw_rssi)
+            max_rssi = max(raw_rssi)
+            path_loss = atten + self.system_path_loss
+            std_deviation = np.std(raw_rssi)
+            data_point = {
+                'Attenuation': atten,
+                'BLE_RSSI': average_rssi,
+                'Pathloss': path_loss,
+                'Min_RSSI': min_rssi,
+                'Max_RSSI': max_rssi,
+                'Standard_deviation': std_deviation
+            }
+            data_points.append(data_point)
+            df = pd.DataFrame({'timestamp': timestamp, 'raw rssi': raw_rssi})
+            df.to_csv(csv_path, encoding='utf-8', index=False)
+            try:
+                self.server_ad.droid.bleAdvSetStopAdvertisingSet(adv_callback)
+            except Exception as err:
+                self.log.warning(
+                    "Failed to stop advertisement: {}".format(err))
+                reset_bluetooth([self.server_ad])
+            self.client_ad.droid.bleStopBleScan(scan_callback)
+        filepath = os.path.join(
+            self.log_path, '{}_summary.csv'.format(self.current_test_name))
+        ble_df = pd.DataFrame(data_points)
+        ble_df.to_csv(filepath, encoding='utf-8')
+        return True
+
+    def plot_ble_graph(self, df):
+        """ Plotting BLE RSSI and Throughput with Attenuation.
+
+        Args:
+            df: Summary of results contains attenuation, BLE_RSSI and Throughput
+        """
+        self.plot.add_line(df['Pathloss'],
+                           df['BLE_RSSI'],
+                           legend='DUT BLE RSSI (dBm)',
+                           marker='circle_x')
+        self.plot.add_line(df['Pathloss'],
+                           df['Dut_PwLv'],
+                           legend='DUT TX Power (dBm)',
+                           marker='hex',
+                           y_axis='secondary')
+        results_file_path = os.path.join(
+            self.log_path, '{}.html'.format(self.current_test_name))
+        self.plot.generate_figure()
+        bokeh_figure.BokehFigure.save_figures([self.plot], results_file_path)
+
+    def get_ble_rssi_and_pwlv(self):
+        process_data_dict = btutils.get_bt_metric(self.client_ad)
+        rssi_primary = process_data_dict.get('rssi')
+        pwlv_primary = process_data_dict.get('pwlv')
+        rssi_primary = rssi_primary.get(self.client_ad.serial)
+        pwlv_primary = pwlv_primary.get(self.client_ad.serial)
+        return rssi_primary, pwlv_primary
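For context, a hedged sketch (example values only, not from any real testbed config) of the 'attenuation_vector' and 'system_path_loss' user params this test consumes and the sweep built in setup_class:

    # Illustrative user params; actual values live in the ACTS test config.
    attenuation_vector = {'start': 20, 'stop': 60, 'step': 5}
    system_path_loss = 28

    # setup_class turns the dict into an inclusive attenuation sweep:
    attenuation_range = range(attenuation_vector['start'],
                              attenuation_vector['stop'] + 1,
                              attenuation_vector['step'])
    # -> 20, 25, ..., 60; each point is reported against
    #    path_loss = atten + system_path_loss
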
diff --git a/acts_tests/tests/google/bt/car_bt/BtCarPairedConnectDisconnectTest.py b/acts_tests/tests/google/bt/car_bt/BtCarPairedConnectDisconnectTest.py
index eb2a06c..82751af 100644
--- a/acts_tests/tests/google/bt/car_bt/BtCarPairedConnectDisconnectTest.py
+++ b/acts_tests/tests/google/bt/car_bt/BtCarPairedConnectDisconnectTest.py
@@ -79,9 +79,9 @@
         self.car.droid.bluetoothHfpClientSetPriority(
             self.ph.droid.bluetoothGetLocalAddress(),
             BtEnum.BluetoothPriorityLevel.PRIORITY_OFF.value)
-        self.ph.droid.bluetoothHspSetPriority(
+        self.ph.droid.bluetoothHspSetConnectionPolicy(
             self.car.droid.bluetoothGetLocalAddress(),
-            BtEnum.BluetoothPriorityLevel.PRIORITY_OFF.value)
+            BtEnum.BluetoothConnectionPolicy.CONNECTION_POLICY_FORBIDDEN.value)
         addr = self.ph.droid.bluetoothGetLocalAddress()
         if not bt_test_utils.connect_pri_to_sec(
                 self.car, self.ph,
diff --git a/acts_tests/tests/google/bt/performance/BtA2dpDynamicChannelTest.py b/acts_tests/tests/google/bt/performance/BtA2dpDynamicChannelTest.py
new file mode 100644
index 0000000..0ee137c
--- /dev/null
+++ b/acts_tests/tests/google/bt/performance/BtA2dpDynamicChannelTest.py
@@ -0,0 +1,116 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+import time
+import os
+import logging
+import acts_contrib.test_utils.bt.bt_test_utils as btutils
+import acts_contrib.test_utils.coex.audio_test_utils as atu
+from acts import asserts
+from acts_contrib.test_utils.bt.A2dpBaseTest import A2dpBaseTest
+from acts.signals import TestFailure
+
+INIT_ATTEN = 0
+WAIT_TIME = 2
+
+
+class BtA2dpDynamicChannelTest(A2dpBaseTest):
+    def __init__(self, configs):
+        super().__init__(configs)
+        req_params = ['codecs', 'rssi_profile_params']
+        # 'rssi_profile_params' is a dict containing a list of upper_bound,
+        # lower_bound attenuation values, Dwell time for RSSI and the test duration
+        # ex:- "rssi_profile_params": {
+        #   "upper_bound": [15, 25],
+        #    "RSSI_Dwell_time": [1, 1],
+        #    "lower_bound": [35, 45],
+        #    "test_duration": 30}
+        # 'codecs' is a list containing all codecs required in the tests
+        self.unpack_userparams(req_params)
+        self.upper_bound = self.rssi_profile_params['upper_bound']
+        self.lower_bound = self.rssi_profile_params['lower_bound']
+        self.dwell_time = self.rssi_profile_params['RSSI_Dwell_time']
+        for upper_bound, lower_bound, dwell_time in zip(
+                self.upper_bound, self.lower_bound, self.dwell_time):
+            for codec_config in self.codecs:
+                self.generate_test_case(codec_config, upper_bound, lower_bound,
+                                        dwell_time)
+
+    def setup_class(self):
+        super().setup_class()
+        # Enable BQR on all android devices
+        btutils.enable_bqr(self.android_devices)
+        self.log_path = os.path.join(logging.log_path, 'results')
+
+    def teardown_class(self):
+        super().teardown_class()
+
+    def generate_test_case(self, codec_config, upper_bound, lower_bound,
+                           dwell_time):
+        def test_case_fn():
+            self.check_audio_quality_dynamic_rssi(upper_bound, lower_bound,
+                                                  dwell_time)
+
+        test_case_name = 'test_bt_a2dp_Dynamic_channel_between_attenuation_{}dB_and_{}dB' \
+                         '_codec_{}'.format(upper_bound, lower_bound, codec_config['codec_type'])
+        setattr(self, test_case_name, test_case_fn)
+
+    def check_audio_quality_dynamic_rssi(self, upper_bound, lower_bound,
+                                         dwell_time):
+        tag = 'Dynamic_RSSI'
+        self.media.play()
+        proc = self.audio_device.start()
+        self.inject_rssi_profile(upper_bound, lower_bound, dwell_time)
+        proc.kill()
+        time.sleep(WAIT_TIME)
+        proc.kill()
+        audio_captured = self.audio_device.stop()
+        self.media.stop()
+        self.log.info('Audio play and record stopped')
+        asserts.assert_true(audio_captured, 'Audio not recorded')
+        audio_result = atu.AudioCaptureResult(audio_captured,
+                                              self.audio_params)
+        thdn = audio_result.THDN(**self.audio_params['thdn_params'])
+        self.log.info('THDN is {}'.format(thdn[0]))
+        # Reading DUT RSSI to check the RSSI fluctuation from
+        # upper and lower bound attenuation values
+        self.attenuator.set_atten(upper_bound)
+        [
+            rssi_master, pwl_master, rssi_c0_master, rssi_c1_master,
+            txpw_c0_master, txpw_c1_master, bftx_master, divtx_master
+        ], [rssi_slave] = self._get_bt_link_metrics(tag)
+        rssi_l1 = rssi_master.get(self.dut.serial, -127)
+        pwlv_l1 = pwl_master.get(self.dut.serial, -127)
+        self.attenuator.set_atten(lower_bound)
+        [
+            rssi_master, pwl_master, rssi_c0_master, rssi_c1_master,
+            txpw_c0_master, txpw_c1_master, bftx_master, divtx_master
+        ], [rssi_slave] = self._get_bt_link_metrics(tag)
+        rssi_l2 = rssi_master.get(self.dut.serial, -127)
+        pwlv_l2 = pwl_master.get(self.dut.serial, -127)
+        self.log.info(
+            "DUT RSSI is fluctuating between {} and {} dBm with {}sec interval"
+            .format(rssi_l1, rssi_l2, dwell_time))
+        if thdn[0] > self.audio_params['thdn_threshold'] or thdn[0] == 0:
+            raise TestFailure('Observed audio glitches!')
+
+    def inject_rssi_profile(self, upper_bound, lower_bound, dwell_time):
+        end_time = time.time() + self.rssi_profile_params['test_duration']
+        self.log.info("Testing dynamic channel RSSI")
+        while time.time() < end_time:
+            self.attenuator.set_atten(upper_bound)
+            time.sleep(dwell_time)
+            self.attenuator.set_atten(lower_bound)
+            time.sleep(dwell_time)
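With the example 'rssi_profile_params' shown in the __init__ comment and a hypothetical codec list, the dynamically generated test cases would look like the following (illustrative sketch, not part of the patch):

    # Hypothetical user params; codec names are only examples.
    codecs = [{'codec_type': 'SBC'}, {'codec_type': 'AAC'}]
    rssi_profile_params = {'upper_bound': [15, 25],
                           'lower_bound': [35, 45],
                           'RSSI_Dwell_time': [1, 1],
                           'test_duration': 30}
    # __init__ zips the bounds and dwell times and calls generate_test_case
    # once per (bounds, codec) pair, producing attributes such as:
    #   test_bt_a2dp_Dynamic_channel_between_attenuation_15dB_and_35dB_codec_SBC
    #   test_bt_a2dp_Dynamic_channel_between_attenuation_25dB_and_45dB_codec_AAC
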
diff --git a/acts_tests/tests/google/bt/performance/BtA2dpRangeTest.py b/acts_tests/tests/google/bt/performance/BtA2dpRangeTest.py
index d2bcb10..c775dd7 100644
--- a/acts_tests/tests/google/bt/performance/BtA2dpRangeTest.py
+++ b/acts_tests/tests/google/bt/performance/BtA2dpRangeTest.py
@@ -13,23 +13,16 @@
 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 # License for the specific language governing permissions and limitations under
 # the License.
-import os
-import pandas as pd
 import acts_contrib.test_utils.bt.bt_test_utils as btutils
-import acts_contrib.test_utils.wifi.wifi_performance_test_utils as wifi_utils
 from acts import asserts
-from acts_contrib.test_utils.bt import bt_constants
-from acts_contrib.test_utils.bt import BtEnum
 from acts_contrib.test_utils.bt.A2dpBaseTest import A2dpBaseTest
-from acts_contrib.test_utils.bt.loggers import bluetooth_metric_logger as log
-from acts_contrib.test_utils.power.PowerBTBaseTest import ramp_attenuation
-from acts.signals import TestPass
+
+INIT_ATTEN = 0
 
 
 class BtA2dpRangeTest(A2dpBaseTest):
     def __init__(self, configs):
         super().__init__(configs)
-        self.bt_logger = log.BluetoothMetricLogger.for_test_case()
         req_params = ['attenuation_vector', 'codecs']
         #'attenuation_vector' is a dict containing: start, stop and step of
         #attenuation changes
@@ -40,151 +33,30 @@
 
     def setup_class(self):
         super().setup_class()
+        opt_params = ['gain_mismatch', 'dual_chain']
+        self.unpack_userparams(opt_params, dual_chain=None, gain_mismatch=None)
         # Enable BQR on all android devices
         btutils.enable_bqr(self.android_devices)
+        if hasattr(self, 'dual_chain') and self.dual_chain == 1:
+            self.atten_c0 = self.attenuators[0]
+            self.atten_c1 = self.attenuators[1]
+            self.atten_c0.set_atten(INIT_ATTEN)
+            self.atten_c1.set_atten(INIT_ATTEN)
+
+    def teardown_class(self):
+        super().teardown_class()
+        if hasattr(self, 'atten_c0') and hasattr(self, 'atten_c1'):
+            self.atten_c0.set_atten(INIT_ATTEN)
+            self.atten_c1.set_atten(INIT_ATTEN)
 
     def generate_test_case(self, codec_config):
         def test_case_fn():
             self.run_a2dp_to_max_range(codec_config)
 
-        test_case_name = 'test_bt_a2dp_range_codec_{}'.format(
-            codec_config['codec_type'])
-        setattr(self, test_case_name, test_case_fn)
-
-    def generate_proto(self, data_points, codec_type, sample_rate,
-                       bits_per_sample, channel_mode):
-        """Generate a results protobuf.
-
-        Args:
-            data_points: list of dicts representing info to go into
-              AudioTestDataPoint protobuffer message.
-            codec_type: The codec type config to store in the proto.
-            sample_rate: The sample rate config to store in the proto.
-            bits_per_sample: The bits per sample config to store in the proto.
-            channel_mode: The channel mode config to store in the proto.
-        Returns:
-             dict: Dictionary with key 'proto' mapping to serialized protobuf,
-               'proto_ascii' mapping to human readable protobuf info, and 'test'
-               mapping to the test class name that generated the results.
-        """
-
-        # Populate protobuf
-        test_case_proto = self.bt_logger.proto_module.BluetoothAudioTestResult(
-        )
-
-        for data_point in data_points:
-            audio_data_proto = test_case_proto.data_points.add()
-            log.recursive_assign(audio_data_proto, data_point)
-
-        codec_proto = test_case_proto.a2dp_codec_config
-        codec_proto.codec_type = bt_constants.codec_types[codec_type]
-        codec_proto.sample_rate = int(sample_rate)
-        codec_proto.bits_per_sample = int(bits_per_sample)
-        codec_proto.channel_mode = bt_constants.channel_modes[channel_mode]
-
-        self.bt_logger.add_config_data_to_proto(test_case_proto, self.dut,
-                                                self.bt_device)
-
-        self.bt_logger.add_proto_to_results(test_case_proto,
-                                            self.__class__.__name__)
-
-        proto_dict = self.bt_logger.get_proto_dict(self.__class__.__name__,
-                                                   test_case_proto)
-        del proto_dict["proto_ascii"]
-        return proto_dict
-
-    def plot_graph(self, df):
-        """ Plotting A2DP DUT RSSI, remote RSSI and TX Power with Attenuation.
-
-        Args:
-            df: Summary of results contains attenuation, DUT RSSI, remote RSSI and Tx Power
-        """
-        self.plot = wifi_utils.BokehFigure(title='{}'.format(
-            self.current_test_name),
-                                           x_label='Pathloss (dBm)',
-                                           primary_y_label='RSSI (dBm)',
-                                           secondary_y_label='TX Power (dBm)',
-                                           axis_label_size='16pt')
-        self.plot.add_line(df.index,
-                           df['rssi_primary'],
-                           legend='DUT RSSI (dBm)',
-                           marker='circle_x')
-        self.plot.add_line(df.index,
-                           df['rssi_secondary'],
-                           legend='Remote RSSI (dBm)',
-                           marker='square_x')
-        self.plot.add_line(df.index,
-                           df['tx_power_level_master'],
-                           legend='DUT TX Power (dBm)',
-                           marker='hex',
-                           y_axis='secondary')
-
-        results_file_path = os.path.join(
-            self.log_path, '{}.html'.format(self.current_test_name))
-        self.plot.generate_figure()
-        wifi_utils.BokehFigure.save_figures([self.plot], results_file_path)
-
-    def run_a2dp_to_max_range(self, codec_config):
-        attenuation_range = range(self.attenuation_vector['start'],
-                                  self.attenuation_vector['stop'] + 1,
-                                  self.attenuation_vector['step'])
-
-        data_points = []
-        self.file_output = os.path.join(
-            self.log_path, '{}.csv'.format(self.current_test_name))
-
-        # Set Codec if needed
-        current_codec = self.dut.droid.bluetoothA2dpGetCurrentCodecConfig()
-        current_codec_type = BtEnum.BluetoothA2dpCodecType(
-            current_codec['codecType']).name
-        if current_codec_type != codec_config['codec_type']:
-            codec_set = btutils.set_bluetooth_codec(self.dut, **codec_config)
-            asserts.assert_true(codec_set, 'Codec configuration failed.')
+        if hasattr(self, 'dual_chain') and self.dual_chain == 1:
+            test_case_name = 'test_dual_bt_a2dp_range_codec_{}_gainmismatch_{}dB'.format(
+                codec_config['codec_type'], self.gain_mismatch)
         else:
-            self.log.info('Current codec is {}, no need to change'.format(
-                current_codec_type))
-
-        #loop RSSI with the same codec setting
-        for atten in attenuation_range:
-            ramp_attenuation(self.attenuator, atten)
-            self.log.info('Set attenuation to %d dB', atten)
-
-            tag = 'codec_{}_attenuation_{}dB_'.format(
-                codec_config['codec_type'], atten)
-            recorded_file = self.play_and_record_audio(
-                self.audio_params['duration'])
-            [rssi_master, pwl_master, rssi_slave] = self._get_bt_link_metrics()
-            thdns = self.run_thdn_analysis(recorded_file, tag)
-            # Collect Metrics for dashboard
-            data_point = {
-                'attenuation_db': int(self.attenuator.get_atten()),
-                'rssi_primary': rssi_master[self.dut.serial],
-                'tx_power_level_master': pwl_master[self.dut.serial],
-                'rssi_secondary': rssi_slave[self.bt_device_controller.serial],
-                'total_harmonic_distortion_plus_noise_percent': thdns[0] * 100
-            }
-            data_points.append(data_point)
-            self.log.info(data_point)
-            A2dpRange_df = pd.DataFrame(data_points)
-
-            # Check thdn for glitches, stop if max range reached
-            for thdn in thdns:
-                if thdn >= self.audio_params['thdn_threshold']:
-                    self.log.info(
-                        'Max range at attenuation {} dB'.format(atten))
-                    self.log.info('DUT rssi {} dBm, DUT tx power level {}, '
-                                  'Remote rssi {} dBm'.format(
-                                      rssi_master, pwl_master, rssi_slave))
-                    proto_dict = self.generate_proto(data_points,
-                                                     **codec_config)
-                    A2dpRange_df.to_csv(self.file_output, index=False)
-                    self.plot_graph(A2dpRange_df)
-                    raise TestPass('Max range reached and move to next codec',
-                                   extras=proto_dict)
-        # Save Data points to csv
-        A2dpRange_df.to_csv(self.file_output, index=False)
-        # Plot graph
-        self.plot_graph(A2dpRange_df)
-        proto_dict = self.generate_proto(data_points, **codec_config)
-        raise TestPass('Could not reach max range, need extra attenuation.',
-                       extras=proto_dict)
\ No newline at end of file
+            test_case_name = 'test_bt_a2dp_range_codec_{}'.format(
+                codec_config['codec_type'])
+        setattr(self, test_case_name, test_case_fn)
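
The range tests above create their test methods at runtime: generate_test_case builds a closure over one codec config and binds it under a codec-specific (and, for dual-chain runs, gain-mismatch-specific) name. A minimal, self-contained sketch of that pattern follows; ExampleRangeTest and its codec list are hypothetical stand-ins, not ACTS classes.

    # Sketch of the setattr-based test generation used by the range tests above.
    # ExampleRangeTest and its codec list are illustrative stand-ins only.
    class ExampleRangeTest:
        def __init__(self, codecs, dual_chain=None, gain_mismatch=None):
            self.dual_chain = dual_chain
            self.gain_mismatch = gain_mismatch
            for codec_config in codecs:
                self.generate_test_case(codec_config)

        def run_a2dp_to_max_range(self, codec_config):
            print('running range sweep for', codec_config['codec_type'])

        def generate_test_case(self, codec_config):
            def test_case_fn():
                self.run_a2dp_to_max_range(codec_config)

            if self.dual_chain == 1:
                name = 'test_dual_bt_a2dp_range_codec_{}_gainmismatch_{}dB'.format(
                    codec_config['codec_type'], self.gain_mismatch)
            else:
                name = 'test_bt_a2dp_range_codec_{}'.format(
                    codec_config['codec_type'])
            # The test runner later looks up the case by this attribute name.
            setattr(self, name, test_case_fn)

    tests = ExampleRangeTest([{'codec_type': 'AAC'}, {'codec_type': 'SBC'}])
    tests.test_bt_a2dp_range_codec_AAC()  # invokes the generated case
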
diff --git a/acts_tests/tests/google/bt/performance/BtA2dpRangeWithBleAdvTest.py b/acts_tests/tests/google/bt/performance/BtA2dpRangeWithBleAdvTest.py
new file mode 100644
index 0000000..5fe9503
--- /dev/null
+++ b/acts_tests/tests/google/bt/performance/BtA2dpRangeWithBleAdvTest.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+import acts_contrib.test_utils.bt.bt_test_utils as btutils
+from acts_contrib.test_utils.bt.bt_constants import adv_succ
+from acts_contrib.test_utils.bt.A2dpBaseTest import A2dpBaseTest
+from acts_contrib.test_utils.bt.bt_constants import bt_default_timeout
+from acts_contrib.test_utils.bt.bt_constants import ble_advertise_settings_modes
+from acts_contrib.test_utils.bt.bt_constants import ble_advertise_settings_tx_powers
+from acts_contrib.test_utils.bt.bt_test_utils import BtTestUtilsError
+from queue import Empty
+from acts_contrib.test_utils.bt.bt_test_utils import generate_ble_advertise_objects
+from acts_contrib.test_utils.bt.bt_test_utils import setup_multiple_devices_for_bt_test
+
+INIT_ATTEN = 0
+
+
+class BtA2dpRangeWithBleAdvTest(A2dpBaseTest):
+    """User can generate test case with below format.
+      test_bt_a2dp_range_codec_"Codec"_adv_mode_"Adv Mode"_adv_tx_power_"Adv Tx Power"
+
+      Below is the list of test cases:
+          test_bt_a2dp_range_codec_AAC_adv_mode_low_power_adv_tx_power_ultra_low
+          test_bt_a2dp_range_codec_AAC_adv_mode_low_power_adv_tx_power_low
+          test_bt_a2dp_range_codec_AAC_adv_mode_low_power_adv_tx_power_medium
+          test_bt_a2dp_range_codec_AAC_adv_mode_low_power_adv_tx_power_high
+          test_bt_a2dp_range_codec_AAC_adv_mode_balanced_adv_tx_power_ultra_low
+          test_bt_a2dp_range_codec_AAC_adv_mode_balanced_adv_tx_power_low
+          test_bt_a2dp_range_codec_AAC_adv_mode_balanced_adv_tx_power_medium
+          test_bt_a2dp_range_codec_AAC_adv_mode_balanced_adv_tx_power_high
+          test_bt_a2dp_range_codec_AAC_adv_mode_low_latency_adv_tx_power_ultra_low
+          test_bt_a2dp_range_codec_AAC_adv_mode_low_latency_adv_tx_power_low
+          test_bt_a2dp_range_codec_AAC_adv_mode_low_latency_adv_tx_power_medium
+          test_bt_a2dp_range_codec_AAC_adv_mode_low_latency_adv_tx_power_high
+          test_bt_a2dp_range_codec_SBC_adv_mode_low_power_adv_tx_power_ultra_low
+          test_bt_a2dp_range_codec_SBC_adv_mode_low_power_adv_tx_power_low
+          test_bt_a2dp_range_codec_SBC_adv_mode_low_power_adv_tx_power_medium
+          test_bt_a2dp_range_codec_SBC_adv_mode_low_power_adv_tx_power_high
+          test_bt_a2dp_range_codec_SBC_adv_mode_balanced_adv_tx_power_ultra_low
+          test_bt_a2dp_range_codec_SBC_adv_mode_balanced_adv_tx_power_low
+          test_bt_a2dp_range_codec_SBC_adv_mode_balanced_adv_tx_power_medium
+          test_bt_a2dp_range_codec_SBC_adv_mode_balanced_adv_tx_power_high
+          test_bt_a2dp_range_codec_SBC_adv_mode_low_latency_adv_tx_power_ultra_low
+          test_bt_a2dp_range_codec_SBC_adv_mode_low_latency_adv_tx_power_low
+          test_bt_a2dp_range_codec_SBC_adv_mode_low_latency_adv_tx_power_medium
+          test_bt_a2dp_range_codec_SBC_adv_mode_low_latency_adv_tx_power_high
+
+      """
+    def __init__(self, configs):
+        super().__init__(configs)
+        req_params = ['attenuation_vector', 'codecs']
+        #'attenuation_vector' is a dict containing: start, stop and step of
+        #attenuation changes
+        #'codecs' is a list containing all codecs required in the tests
+        self.unpack_userparams(req_params)
+        for codec_config in self.codecs:
+            # Loop all advertise modes and power levels
+            for adv_mode in ble_advertise_settings_modes.items():
+                for adv_power_level in ble_advertise_settings_tx_powers.items(
+                ):
+                    self.generate_test_case(codec_config, adv_mode,
+                                            adv_power_level)
+
+    def setup_class(self):
+        super().setup_class()
+        opt_params = ['gain_mismatch', 'dual_chain']
+        self.unpack_userparams(opt_params, dual_chain=None, gain_mismatch=None)
+        # Enable BQR on all android devices
+        btutils.enable_bqr(self.android_devices)
+        if hasattr(self, 'dual_chain') and self.dual_chain == 1:
+            self.atten_c0 = self.attenuators[0]
+            self.atten_c1 = self.attenuators[1]
+            self.atten_c0.set_atten(INIT_ATTEN)
+            self.atten_c1.set_atten(INIT_ATTEN)
+        return setup_multiple_devices_for_bt_test(self.android_devices)
+
+    def teardown_class(self):
+        super().teardown_class()
+        if hasattr(self, 'atten_c0') and hasattr(self, 'atten_c1'):
+            self.atten_c0.set_atten(INIT_ATTEN)
+            self.atten_c1.set_atten(INIT_ATTEN)
+
+    def generate_test_case(self, codec_config, adv_mode, adv_power_level):
+        def test_case_fn():
+            adv_callback = self.start_ble_adv(adv_mode[1], adv_power_level[1])
+            self.run_a2dp_to_max_range(codec_config)
+            self.dut.droid.bleStopBleAdvertising(adv_callback)
+            self.log.info("Advertisement stopped Successfully")
+
+        if hasattr(self, 'dual_chain') and self.dual_chain == 1:
+            test_case_name = 'test_dual_bt_a2dp_range_codec_{}_gainmismatch_{}dB'.format(
+                codec_config['codec_type'], self.gain_mismatch)
+        else:
+            test_case_name = 'test_bt_a2dp_range_codec_{}_adv_mode_{}_adv_tx_power_{}'.format(
+                codec_config['codec_type'], adv_mode[0], adv_power_level[0])
+        setattr(self, test_case_name, test_case_fn)
+
+    def start_ble_adv(self, adv_mode, adv_power_level):
+        """Function to start an LE advertisement
+        Steps:
+        1. Create an advertise data object.
+        2. Create an advertise settings object.
+        3. Create an advertise callback object.
+        4. Start LE advertising using the objects created in steps 1-3.
+        5. Find the onSuccess advertisement event.
+
+        Expected Result:
+        Advertisement is successfully advertising.
+
+        Returns:
+            The advertise callback object."""
+
+        self.dut.droid.bleSetAdvertiseDataIncludeDeviceName(True)
+        self.dut.droid.bleSetAdvertiseSettingsAdvertiseMode(adv_mode)
+        self.dut.droid.bleSetAdvertiseSettingsIsConnectable(True)
+        self.dut.droid.bleSetAdvertiseSettingsTxPowerLevel(adv_power_level)
+        advertise_callback, advertise_data, advertise_settings = (
+            generate_ble_advertise_objects(self.dut.droid))
+        self.dut.droid.bleStartBleAdvertising(advertise_callback,
+                                              advertise_data,
+                                              advertise_settings)
+        try:
+            self.dut.ed.pop_event(adv_succ.format(advertise_callback),
+                                  bt_default_timeout)
+            self.log.info("Advertisement started successfully")
+        except Empty as err:
+            raise BtTestUtilsError(
+                "Advertiser did not start successfully {}".format(err))
+        return advertise_callback
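
The constructor above expands one case per codec, advertise mode, and advertise TX power, which yields the 24 names listed in the class docstring. The snippet below is a hedged sketch of that expansion; the two dictionaries hold illustrative values, the real maps come from bt_constants.

    # Sketch of the codec x adv-mode x TX-power test-name expansion; the two
    # dictionaries are illustrative, not the actual bt_constants maps.
    ble_advertise_settings_modes = {'low_power': 0, 'balanced': 1, 'low_latency': 2}
    ble_advertise_settings_tx_powers = {'ultra_low': 0, 'low': 1, 'medium': 2, 'high': 3}

    def expand_test_names(codecs):
        names = []
        for codec in codecs:
            for mode_name, _mode in ble_advertise_settings_modes.items():
                for power_name, _power in ble_advertise_settings_tx_powers.items():
                    names.append(
                        'test_bt_a2dp_range_codec_{}_adv_mode_{}_adv_tx_power_{}'
                        .format(codec, mode_name, power_name))
        return names

    # 2 codecs * 3 modes * 4 power levels = 24 cases, matching the docstring.
    print(len(expand_test_names(['AAC', 'SBC'])))
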
diff --git a/acts_tests/tests/google/bt/performance/BtA2dpRangeWithBleScanTest.py b/acts_tests/tests/google/bt/performance/BtA2dpRangeWithBleScanTest.py
new file mode 100644
index 0000000..6020c4a
--- /dev/null
+++ b/acts_tests/tests/google/bt/performance/BtA2dpRangeWithBleScanTest.py
@@ -0,0 +1,103 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+import acts_contrib.test_utils.bt.bt_test_utils as btutils
+from acts_contrib.test_utils.bt.A2dpBaseTest import A2dpBaseTest
+from acts_contrib.test_utils.bt.bt_constants import ble_scan_settings_modes
+from acts_contrib.test_utils.bt.bt_test_utils import generate_ble_scan_objects
+from acts_contrib.test_utils.bt.bt_test_utils import setup_multiple_devices_for_bt_test
+from acts_contrib.test_utils.bt.bt_constants import scan_result
+
+INIT_ATTEN = 0
+
+
+class BtA2dpRangeWithBleScanTest(A2dpBaseTest):
+    default_timeout = 10
+
+    def __init__(self, configs):
+        super().__init__(configs)
+        req_params = ['attenuation_vector', 'codecs']
+        #'attenuation_vector' is a dict containing: start, stop and step of
+        #attenuation changes
+        #'codecs' is a list containing all codecs required in the tests
+        self.unpack_userparams(req_params)
+        for codec_config in self.codecs:
+            # Loop all BLE Scan modes
+            for scan_mode in ble_scan_settings_modes.items():
+                self.generate_test_case(codec_config, scan_mode)
+
+    def setup_class(self):
+        super().setup_class()
+        opt_params = ['gain_mismatch', 'dual_chain']
+        self.unpack_userparams(opt_params, dual_chain=None, gain_mismatch=None)
+        # Enable BQR on all android devices
+        btutils.enable_bqr(self.android_devices)
+        if hasattr(self, 'dual_chain') and self.dual_chain == 1:
+            self.atten_c0 = self.attenuators[0]
+            self.atten_c1 = self.attenuators[1]
+            self.atten_c0.set_atten(INIT_ATTEN)
+            self.atten_c1.set_atten(INIT_ATTEN)
+        return setup_multiple_devices_for_bt_test(self.android_devices)
+
+    def teardown_class(self):
+        super().teardown_class()
+        if hasattr(self, 'atten_c0') and hasattr(self, 'atten_c1'):
+            self.atten_c0.set_atten(INIT_ATTEN)
+            self.atten_c1.set_atten(INIT_ATTEN)
+
+    def generate_test_case(self, codec_config, scan_mode):
+        """ Below are the list of test case's user can choose to run.
+        Test case list:
+        "test_bt_a2dp_range_codec_AAC_with_BLE_scan_balanced"
+        "test_bt_a2dp_range_codec_AAC_with_BLE_scan_low_latency"
+        "test_bt_a2dp_range_codec_AAC_with_BLE_scan_low_power"
+        "test_bt_a2dp_range_codec_AAC_with_BLE_scan_opportunistic"
+        "test_bt_a2dp_range_codec_SBC_with_BLE_scan_balanced"
+        "test_bt_a2dp_range_codec_SBC_with_BLE_scan_low_latency"
+        "test_bt_a2dp_range_codec_SBC_with_BLE_scan_low_power"
+        "test_bt_a2dp_range_codec_SBC_with_BLE_scan_opportunistic"
+        """
+        def test_case_fn():
+            scan_callback = self.start_ble_scan(scan_mode[1])
+            self.run_a2dp_to_max_range(codec_config)
+            self.dut.droid.bleStopBleScan(scan_callback)
+            self.log.info("BLE Scan stopped succssfully")
+
+        if hasattr(self, 'dual_chain') and self.dual_chain == 1:
+            test_case_name = 'test_dual_bt_a2dp_range_codec_{}_gainmismatch_{}dB'.format(
+                codec_config['codec_type'], self.gain_mismatch)
+        else:
+            test_case_name = 'test_bt_a2dp_range_codec_{}_with_BLE_scan_{}'.format(
+                codec_config['codec_type'], scan_mode[0])
+        setattr(self, test_case_name, test_case_fn)
+
+    def start_ble_scan(self, scan_mode):
+        """ This function will start Ble Scan with different scan mode.
+
+        Args:
+            Scan_mode: Ble scan setting modes
+
+        returns:
+        Scan_callback: Ble scan callback
+        """
+
+        self.dut.droid.bleSetScanSettingsScanMode(scan_mode)
+        filter_list, scan_settings, scan_callback = generate_ble_scan_objects(
+            self.dut.droid)
+        self.dut.droid.bleStartBleScan(filter_list, scan_settings,
+                                       scan_callback)
+        self.log.info("BLE Scanning started succssfully")
+        return scan_callback
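
One thing to note: run_a2dp_to_max_range (as shown in the code removed from BtA2dpRangeTest earlier in this patch) finishes by raising TestPass, so the bleStopBleScan call placed after it in test_case_fn only runs if the sweep returns without raising. A hypothetical wrapper that stops the scan on either path is sketched below; start_scan, stop_scan and run_sweep are placeholders, not the Sl4a/ACTS APIs themselves.

    # Hypothetical wrapper that stops the BLE scan even when the sweep ends by
    # raising TestPass; the three callables are placeholders, not ACTS APIs.
    def make_scan_wrapped_case(start_scan, stop_scan, run_sweep,
                               scan_mode, codec_config):
        def test_case_fn():
            callback = start_scan(scan_mode)
            try:
                run_sweep(codec_config)
            finally:
                stop_scan(callback)
        return test_case_fn
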
diff --git a/acts_tests/tests/google/bt/performance/BtInterferenceDynamicTest.py b/acts_tests/tests/google/bt/performance/BtInterferenceDynamicTest.py
index 59721ca..ec980a2 100644
--- a/acts_tests/tests/google/bt/performance/BtInterferenceDynamicTest.py
+++ b/acts_tests/tests/google/bt/performance/BtInterferenceDynamicTest.py
@@ -261,9 +261,13 @@
         ramp_attenuation(self.attenuator, bt_atten_level)
         self.interference_rssi_mapping_from_attenuation(
             interference_atten_level)
-        [rssi_master, pwl_master, rssi_slave] = self._get_bt_link_metrics()
-        tag_bt = 'bt_signal_level_{}_rssi_{}_dBm'.format(
-            bt_atten_level, rssi_master)
+        [
+            rssi_master, pwl_master, rssi_c0_master, rssi_c1_master,
+            txpw_c0_master, txpw_c1_master, bftx_master, divtx_master
+        ], [rssi_slave] = self._get_bt_link_metrics()
+        rssi_primary = rssi_master.get(self.dut.serial, -127)
+        tag_bt = 'bt_signal_level_{}'.format(bt_atten_level)
         procs_iperf = []
         for obj in self.wifi_int_pairs:
             obj.iperf_server.start()
@@ -313,9 +317,13 @@
         ramp_attenuation(self.attenuator, bt_atten_level)
         self.interference_rssi_mapping_from_attenuation(
             interference_atten_level)
-        [rssi_master, pwl_master, rssi_slave] = self._get_bt_link_metrics()
-        tag_bt = 'bt_signal_level_{}_rssi_{}_dBm'.format(
-            bt_atten_level, rssi_master)
+        [
+            rssi_master, pwl_master, rssi_c0_master, rssi_c1_master,
+            txpw_c0_master, txpw_c1_master, bftx_master, divtx_master
+        ], [rssi_slave] = self._get_bt_link_metrics()
+        rssi_primary = rssi_master.get(self.dut.serial, -127)
+        tag_bt = 'bt_signal_level_{}'.format(bt_atten_level)
         procs_iperf = []
         #Start IPERF on all three interference pairs
         for obj in self.wifi_int_pairs:
@@ -354,4 +362,4 @@
         self.log.info('THDN results are {}'.format(thdns))
         for thdn in thdns:
             if thdn >= self.audio_params['thdn_threshold']:
-                raise TestFailure('AFH failed')
+                raise TestFailure('AFH failed')
\ No newline at end of file
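
The hunks above change the _get_bt_link_metrics() call sites to a two-part return (an eight-element master group and a one-element slave group) and read per-device values with a -127 dBm fallback. A minimal sketch of that unpacking follows; the return shape of the fake helper is assumed from these call sites only, not from the real implementation.

    # The return shape here is assumed from the call sites in this patch only.
    def fake_get_bt_link_metrics():
        master = ({'SERIAL1': -60}, {'SERIAL1': 9}, {}, {}, {}, {}, {}, {})
        slave = ({'SERIAL2': -58},)
        return master, slave

    [rssi_master, pwl_master, rssi_c0_master, rssi_c1_master,
     txpw_c0_master, txpw_c1_master, bftx_master, divtx_master
     ], [rssi_slave] = fake_get_bt_link_metrics()
    # Fall back to -127 dBm when a serial is missing from the per-device dict.
    rssi_primary = rssi_master.get('SERIAL1', -127)
    print(rssi_primary)  # -60
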
diff --git a/acts_tests/tests/google/bt/performance/BtInterferenceStaticTest.py b/acts_tests/tests/google/bt/performance/BtInterferenceStaticTest.py
index bf6b0de..99218b0 100644
--- a/acts_tests/tests/google/bt/performance/BtInterferenceStaticTest.py
+++ b/acts_tests/tests/google/bt/performance/BtInterferenceStaticTest.py
@@ -15,11 +15,13 @@
 # the License.
 """Stream music through connected device from phone across different
 attenuations."""
-from acts.signals import TestPass
+
 from acts_contrib.test_utils.bt.BtInterferenceBaseTest import BtInterferenceBaseTest
 from acts.metrics.loggers.blackbox import BlackboxMetricLogger
 from acts_contrib.test_utils.bt.BtInterferenceBaseTest import get_iperf_results
+from acts_contrib.test_utils.bt.BtInterferenceBaseTest import inject_static_wifi_interference
 from multiprocessing import Process, Queue
+from acts.signals import TestPass
 
 DEFAULT_THDN_THRESHOLD = 0.9
 MAX_ATTENUATION = 95
@@ -33,8 +35,7 @@
                                           self.attenuation_vector['stop'] + 1,
                                           self.attenuation_vector['step'])
 
-        self.iperf_duration = self.audio_params[
-            'duration'] + TIME_OVERHEAD
+        self.iperf_duration = self.audio_params['duration'] + TIME_OVERHEAD
         for level in list(
                 self.static_wifi_interference['interference_level'].keys()):
             for channels in self.static_wifi_interference['channels']:
@@ -69,44 +70,6 @@
                                               str_channel_test))
         setattr(self, test_case_name, test_case_fn)
 
-    def inject_static_wifi_interference(self, interference_level, channels):
-        """Function to inject wifi interference to bt link and read rssi.
-
-        Interference of IPERF traffic is always running, by setting attenuation,
-        the gate is opened to release the interference to the setup.
-        Args:
-            interference_level: the signal strength of wifi interference, use
-                attenuation level to represent this
-            channels: wifi channels where interference will
-                be injected, list
-        """
-        all_pair = range(len(self.wifi_int_pairs))
-        interference_pair_indices = self.locate_interference_pair_by_channel(
-            channels)
-        inactive_interference_pairs_indices = [
-            item for item in all_pair if item not in interference_pair_indices
-        ]
-        self.log.info(
-            'WiFi interference at {} and inactive channels at {}'.format(
-                interference_pair_indices,
-                inactive_interference_pairs_indices))
-        for i in interference_pair_indices:
-            self.wifi_int_pairs[i].attenuator.set_atten(interference_level)
-            self.log.info('Set attenuation {} dB on attenuator {}'.format(
-                self.wifi_int_pairs[i].attenuator.get_atten(), i + 1))
-        for i in inactive_interference_pairs_indices:
-            self.wifi_int_pairs[i].attenuator.set_atten(MAX_ATTENUATION)
-            self.log.info('Set attenuation {} dB on attenuator {}'.format(
-                self.wifi_int_pairs[i].attenuator.get_atten(), i + 1))
-        #Read interference RSSI
-        self.get_interference_rssi()
-        self.wifi_chan1_rssi_metric.metric_value = self.interference_rssi[0][
-            'rssi']
-        self.wifi_chan6_rssi_metric.metric_value = self.interference_rssi[1][
-            'rssi']
-        self.wifi_chan11_rssi_metric.metric_value = self.interference_rssi[2][
-            'rssi']
-
     def bt_range_with_static_wifi_interference(self, interference_level,
                                                channels):
         """Test function to measure bt range under interference.
@@ -116,12 +79,28 @@
             channels: wifi interference channels
         """
         #setup wifi interference by setting the correct attenuator
-        self.inject_static_wifi_interference(interference_level, channels)
+        inject_static_wifi_interference(self.wifi_int_pairs,
+                                        interference_level, channels)
+        # Read interference RSSI
+        self.get_interference_rssi()
+        self.wifi_chan1_rssi_metric.metric_value = self.interference_rssi[0][
+            'rssi']
+        self.wifi_chan6_rssi_metric.metric_value = self.interference_rssi[1][
+            'rssi']
+        self.wifi_chan11_rssi_metric.metric_value = self.interference_rssi[2][
+            'rssi']
         for atten in self.bt_attenuation_range:
             # Set attenuation for BT link
             self.attenuator.set_atten(atten)
-            [rssi_master, pwl_master, rssi_slave] = self._get_bt_link_metrics()
-            tag = 'attenuation_{}dB_'.format(atten)
+            [
+                rssi_master, pwl_master, rssi_c0_master, rssi_c1_master,
+                txpw_c0_master, txpw_c1_master, bftx_master, divtx_master
+            ], [rssi_slave] = self._get_bt_link_metrics()
+            rssi_primary = rssi_master.get(self.dut.serial, -127)
+            pwl_primary = pwl_master.get(self.dut.serial, -127)
+            rssi_secondary = rssi_slave.get(self.bt_device_controller.serial,
+                                            -127)
+            tag = 'attenuation_{}dB'.format(atten)
             self.log.info(
                 'BT attenuation set to {} dB and start A2DP streaming'.format(
                     atten))
@@ -140,8 +119,8 @@
 
             #play a2dp streaming and run thdn analysis
             queue = Queue()
-            proc_bt = Process(target=self.play_and_record_audio, 
-                              args=(self.audio_params['duration'],queue))
+            proc_bt = Process(target=self.play_and_record_audio,
+                              args=(self.audio_params['duration'], queue))
             for proc in procs_iperf:
                 proc.start()
             proc_bt.start()
@@ -155,23 +134,23 @@
                         obj.channel, iperf_throughput))
                 obj.iperf_server.stop()
                 self.log.info('Stopped IPERF server at port {}'.format(
-                        obj.iperf_server.port))
+                    obj.iperf_server.port))
             audio_captured = queue.get()
             thdns = self.run_thdn_analysis(audio_captured, tag)
-            self.log.info('THDN results are {} at {} dB attenuation'
-                          .format(thdns, atten))
-            self.log.info('master rssi {} dBm, master tx power level {}, '
-                          'slave rssi {} dBm'
-                          .format(rssi_master, pwl_master, rssi_slave))
+            self.log.info('THDN results are {} at {} dB attenuation'.format(
+                thdns, atten))
+            self.log.info('DUT rssi {} dBm, DUT tx power level {}, '
+                          'Remote rssi {} dBm'.format(rssi_primary, pwl_primary,
+                                                      rssi_secondary))
             for thdn in thdns:
                 if thdn >= self.audio_params['thdn_threshold']:
                     self.log.info('Under the WiFi interference condition: '
                                   'channel 1 RSSI: {} dBm, '
                                   'channel 6 RSSI: {} dBm'
-                                  'channel 11 RSSI: {} dBm'
-                                  .format(self.interference_rssi[0]['rssi'],
-                                          self.interference_rssi[1]['rssi'],
-                                          self.interference_rssi[2]['rssi']))
+                                  'channel 11 RSSI: {} dBm'.format(
+                                      self.interference_rssi[0]['rssi'],
+                                      self.interference_rssi[1]['rssi'],
+                                      self.interference_rssi[2]['rssi']))
                     raise TestPass(
                         'Max range for this test is {}, with BT master RSSI at'
-                        ' {} dBm'.format(atten, rssi_master))
+                        ' {} dBm'.format(atten, rssi_primary))
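
inject_static_wifi_interference is now imported from BtInterferenceBaseTest, and the RSSI metric bookkeeping stays with the caller. The sketch below is a plausible reconstruction of that shared helper based on the method removed above; the actual implementation may differ, and the channel-matching logic here is an assumption.

    # Plausible reconstruction of the shared helper; the real version in
    # BtInterferenceBaseTest may differ (channel matching is assumed here).
    import logging

    MAX_ATTENUATION = 95  # dB, mirrors the module constant used by these tests

    def inject_static_wifi_interference(wifi_int_pairs, interference_level, channels):
        """Open the attenuation 'gate' on interfering channels; park the rest."""
        active = [i for i, pair in enumerate(wifi_int_pairs)
                  if pair.channel in channels]
        inactive = [i for i in range(len(wifi_int_pairs)) if i not in active]
        logging.info('WiFi interference at %s and inactive channels at %s',
                     active, inactive)
        for i in active:
            wifi_int_pairs[i].attenuator.set_atten(interference_level)
        for i in inactive:
            wifi_int_pairs[i].attenuator.set_atten(MAX_ATTENUATION)
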
diff --git a/acts_tests/tests/google/bt/performance/InjectWifiInterferenceTest.py b/acts_tests/tests/google/bt/performance/InjectWifiInterferenceTest.py
new file mode 100644
index 0000000..22c2b2f
--- /dev/null
+++ b/acts_tests/tests/google/bt/performance/InjectWifiInterferenceTest.py
@@ -0,0 +1,190 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+import json
+import random
+import sys
+import logging
+import re
+from acts.base_test import BaseTestClass
+from acts_contrib.test_utils.bt.BtInterferenceBaseTest import inject_static_wifi_interference
+from acts_contrib.test_utils.bt.BtInterferenceBaseTest import unpack_custom_file
+from acts_contrib.test_utils.power.PowerBaseTest import ObjNew
+from acts_contrib.test_utils.wifi import wifi_performance_test_utils as wpeutils
+from acts_contrib.test_utils.wifi import wifi_test_utils as wutils
+import time
+
+MAX_ATTENUATION = 95
+INIT_ATTEN = 0
+SCAN = 'wpa_cli scan'
+SCAN_RESULTS = 'wpa_cli scan_results'
+
+
+class InjectWifiInterferenceTest(BaseTestClass):
+    def __init__(self, configs):
+        super().__init__(configs)
+        req_params = ['custom_files', 'wifi_networks']
+        self.unpack_userparams(req_params)
+        for file in self.custom_files:
+            if 'static_interference' in file:
+                self.static_wifi_interference = unpack_custom_file(file)
+            elif 'dynamic_interference' in file:
+                self.dynamic_wifi_interference = unpack_custom_file(file)
+
+    def setup_class(self):
+
+        self.dut = self.android_devices[0]
+        # Set attenuator to minimum attenuation
+        if hasattr(self, 'attenuators'):
+            self.attenuator = self.attenuators[0]
+            self.attenuator.set_atten(INIT_ATTEN)
+        self.wifi_int_pairs = []
+        for i in range(len(self.attenuators) - 1):
+            tmp_dict = {
+                'attenuator': self.attenuators[i + 1],
+                'network': self.wifi_networks[i],
+                'channel': self.wifi_networks[i]['channel']
+            }
+            tmp_obj = ObjNew(**tmp_dict)
+            self.wifi_int_pairs.append(tmp_obj)
+        # Set all interference pair attenuators to the initial attenuation.
+        for obj in self.wifi_int_pairs:
+            obj.attenuator.set_atten(INIT_ATTEN)
+
+    def setup_test(self):
+        self.log.info("Setup test initiated")
+
+    def teardown_class(self):
+        for obj in self.wifi_int_pairs:
+            obj.attenuator.set_atten(MAX_ATTENUATION)
+
+    def teardown_test(self):
+        for obj in self.wifi_int_pairs:
+            obj.attenuator.set_atten(MAX_ATTENUATION)
+
+    def test_inject_static_wifi_interference(self):
+        condition = True
+        while condition:
+            attenuation = [
+                int(x) for x in input(
+                    "Please enter 4 channel attenuation value followed by comma :\n"
+                ).split(',')
+            ]
+            self.set_atten_all_channel(attenuation)
+            # Read interference RSSI
+            self.interference_rssi = get_interference_rssi(
+                self.dut, self.wifi_int_pairs)
+            self.log.info('Under the WiFi interference condition: '
+                          'channel 1 RSSI: {} dBm, '
+                          'channel 6 RSSI: {} dBm, '
+                          'channel 11 RSSI: {} dBm'.format(
+                              self.interference_rssi[0]['rssi'],
+                              self.interference_rssi[1]['rssi'],
+                              self.interference_rssi[2]['rssi']))
+            condition = True
+        return True
+
+    def test_inject_dynamic_interference(self):
+        atten = int(input("Please enter the attenuation level for CHAN1 :"))
+        self.attenuator.set_atten(atten)
+        self.log.info("Attenuation for CHAN1 set to:{} dB".format(atten))
+        interference_rssi = None
+        self.channel_change_interval = self.dynamic_wifi_interference[
+            'channel_change_interval_second']
+        self.wifi_int_levels = list(
+            self.dynamic_wifi_interference['interference_level'].keys())
+        for wifi_level in self.wifi_int_levels:
+            interference_atten_level = self.dynamic_wifi_interference[
+                'interference_level'][wifi_level]
+            all_pair = range(len(self.wifi_int_pairs))
+            # Set initial WiFi interference at channel 1
+            logging.info('Start with interference at channel 1')
+            self.wifi_int_pairs[0].attenuator.set_atten(
+                interference_atten_level)
+            self.wifi_int_pairs[1].attenuator.set_atten(MAX_ATTENUATION)
+            self.wifi_int_pairs[2].attenuator.set_atten(MAX_ATTENUATION)
+            current_int_pair = [0]
+            inactive_int_pairs = [
+                item for item in all_pair if item not in current_int_pair
+            ]
+            logging.info(
+                'Inject random changing channel (1,6,11) wifi interference'
+                'every {} second'.format(self.channel_change_interval))
+            while True:
+                current_int_pair = [
+                    random.randint(inactive_int_pairs[0],
+                                   inactive_int_pairs[1])
+                ]
+                inactive_int_pairs = [
+                    item for item in all_pair if item not in current_int_pair
+                ]
+                self.wifi_int_pairs[current_int_pair[0]].attenuator.set_atten(
+                    interference_atten_level)
+                logging.info('Current interference at channel {}'.format(
+                    self.wifi_int_pairs[current_int_pair[0]].channel))
+                for i in inactive_int_pairs:
+                    self.wifi_int_pairs[i].attenuator.set_atten(
+                        MAX_ATTENUATION)
+                # Read interference RSSI
+                self.interference_rssi = get_interference_rssi(
+                    self.dut, self.wifi_int_pairs)
+                self.log.info('Under the WiFi interference condition: '
+                              'channel 1 RSSI: {} dBm, '
+                              'channel 6 RSSI: {} dBm, '
+                              'channel 11 RSSI: {} dBm'.format(
+                                  self.interference_rssi[0]['rssi'],
+                                  self.interference_rssi[1]['rssi'],
+                                  self.interference_rssi[2]['rssi']))
+                time.sleep(self.channel_change_interval)
+            return True
+
+    def set_atten_all_channel(self, attenuation):
+        self.attenuators[0].set_atten(attenuation[0])
+        self.attenuators[1].set_atten(attenuation[1])
+        self.attenuators[2].set_atten(attenuation[2])
+        self.attenuators[3].set_atten(attenuation[3])
+        self.log.info(
+            "Attenuation set to CHAN1:{},CHAN2:{},CHAN3:{},CHAN4:{}".format(
+                self.attenuators[0].get_atten(),
+                self.attenuators[1].get_atten(),
+                self.attenuators[2].get_atten(),
+                self.attenuators[3].get_atten()))
+
+
+def get_interference_rssi(dut, wifi_int_pairs):
+    """Function to read wifi interference RSSI level."""
+
+    bssids = []
+    interference_rssi = []
+    wutils.wifi_toggle_state(dut, True)
+    for item in wifi_int_pairs:
+        ssid = item.network['SSID']
+        bssid = item.network['bssid']
+        bssids.append(bssid)
+        interference_rssi_dict = {
+            "ssid": ssid,
+            "bssid": bssid,
+            "chan": item.channel,
+            "rssi": 0
+        }
+        interference_rssi.append(interference_rssi_dict)
+    scanned_rssi = wpeutils.get_scan_rssi(dut, bssids, num_measurements=2)
+    for item in interference_rssi:
+        item['rssi'] = scanned_rssi[item['bssid']]['mean']
+        logging.info('Interference RSSI at channel {} is {} dBm'.format(
+            item['chan'], item['rssi']))
+    wutils.wifi_toggle_state(dut, False)
+    return interference_rssi
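
test_inject_static_wifi_interference above feeds the comma-separated prompt straight into set_atten_all_channel. A small, hypothetical parsing helper is shown below; the length check is an addition for illustration and is not part of the test.

    # Hypothetical input parser; the length check is illustrative only.
    def parse_attenuation_input(raw, expected_channels=4):
        values = [int(x) for x in raw.split(',')]
        if len(values) != expected_channels:
            raise ValueError('expected {} comma-separated values, got {}'.format(
                expected_channels, len(values)))
        return values

    print(parse_attenuation_input('10,95,95,95'))  # [10, 95, 95, 95]
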
diff --git a/acts_tests/tests/google/bt/performance/StartIperfTrafficTest.py b/acts_tests/tests/google/bt/performance/StartIperfTrafficTest.py
new file mode 100644
index 0000000..edc373f
--- /dev/null
+++ b/acts_tests/tests/google/bt/performance/StartIperfTrafficTest.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+import sys
+import time
+import acts.controllers.iperf_client as ipc
+from acts_contrib.test_utils.bt.BtInterferenceBaseTest import BtInterferenceBaseTest
+from acts_contrib.test_utils.power.PowerBaseTest import ObjNew
+from multiprocessing import Process, Queue
+from acts_contrib.test_utils.bt.BtInterferenceBaseTest import setup_ap_connection
+from acts_contrib.test_utils.wifi import wifi_power_test_utils as wputils
+from acts.signals import TestPass
+
+
+class StartIperfTrafficTest(BtInterferenceBaseTest):
+    """
+    """
+    def __init__(self, configs):
+        super().__init__(configs)
+        req_params =["IperfDuration"]
+        self.unpack_userparams(req_params)
+
+    def setup_class(self):
+        self.dut = self.android_devices[0]
+        self.wifi_int_pairs = []
+        for i in range(len(self.attenuators) - 1):
+            tmp_dict = {
+                'dut': self.android_devices[i],
+                'ap': self.access_points[i],
+                'network': self.wifi_networks[i],
+                'channel': self.wifi_networks[i]['channel'],
+                'iperf_server': self.iperf_servers[i],
+                'ether_int': self.packet_senders[i],
+                'iperf_client': ipc.IPerfClientOverAdb(self.android_devices[i])
+            }
+            tmp_obj = ObjNew(**tmp_dict)
+            self.wifi_int_pairs.append(tmp_obj)
+        ##Setup connection between WiFi APs and Phones and get DHCP address
+        # for the interface
+        for obj in self.wifi_int_pairs:
+            brconfigs = setup_ap_connection(obj.dut, obj.network, obj.ap)
+            iperf_server_address = wputils.wait_for_dhcp(
+                obj.ether_int.interface)
+            setattr(obj, 'server_address', iperf_server_address)
+            setattr(obj, 'brconfigs', brconfigs)
+
+    def setup_test(self):
+        self.log.info("Setup test initiated")
+
+    def teardown_class(self):
+        for obj in self.wifi_int_pairs:
+            obj.ap.bridge.teardown(obj.brconfigs)
+            self.log.info('Stop IPERF server at port {}'.format(
+                obj.iperf_server.port))
+            obj.iperf_server.stop()
+            self.log.info('Stop IPERF process on {}'.format(obj.dut.serial))
+            #obj.dut.adb.shell('pkill -9 iperf3')
+            #only for glinux machine
+            #            wputils.bring_down_interface(obj.ether_int.interface)
+            obj.ap.close()
+
+    def teardown_test(self):
+        self.log.info("Setup test initiated")
+
+    def test_start_iperf_traffic(self):
+        self.channel_change_interval = self.dynamic_wifi_interference[
+            'channel_change_interval_second']
+        self.wifi_int_levels = list(
+            self.dynamic_wifi_interference['interference_level'].keys())
+        for wifi_level in self.wifi_int_levels:
+            interference_atten_level = self.dynamic_wifi_interference[
+                'interference_level'][wifi_level]
+            end_time = time.time() + self.IperfDuration
+            while time.time() < end_time:
+                procs_iperf = []
+                # Start IPERF on all three interference pairs
+                for obj in self.wifi_int_pairs:
+                    obj.iperf_server.start()
+                    iperf_args = '-i 1 -t {} -p {} -J -R'.format(
+                        self.IperfDuration, obj.iperf_server.port)
+                    tag = 'chan_{}'.format(obj.channel)
+                    proc_iperf = Process(target=obj.iperf_client.start,
+                                         args=(obj.server_address, iperf_args,
+                                               tag))
+                    proc_iperf.start()
+                    procs_iperf.append(proc_iperf)
+                self.log.info('Started IPERF on all three channels')
+                for proc in procs_iperf:
+                    proc.join()
+        return True
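
test_start_iperf_traffic launches one iperf client per interference pair in its own process and then joins them. Below is a minimal, self-contained sketch of that fan-out/join pattern; worker() and the addresses stand in for IPerfClientOverAdb.start() and real iperf servers.

    # Self-contained sketch of the per-channel fan-out/join pattern; worker()
    # and the addresses are placeholders, not the ACTS iperf client API.
    from multiprocessing import Process
    import time

    def worker(server_address, iperf_args, tag):
        print('iperf to {} ({}): {}'.format(server_address, tag, iperf_args))
        time.sleep(1)

    if __name__ == '__main__':
        procs = []
        for chan, addr in [(1, '192.168.1.2'), (6, '192.168.2.2'), (11, '192.168.3.2')]:
            p = Process(target=worker,
                        args=(addr, '-i 1 -t 10 -J -R', 'chan_{}'.format(chan)))
            p.start()
            procs.append(p)
        for p in procs:
            p.join()
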
diff --git a/acts_tests/tests/google/gnss/GnssBlankingThTest.py b/acts_tests/tests/google/gnss/GnssBlankingThTest.py
new file mode 100644
index 0000000..e625171
--- /dev/null
+++ b/acts_tests/tests/google/gnss/GnssBlankingThTest.py
@@ -0,0 +1,146 @@
+#!/usr/bin/env python3
+#
+#   Copyright 2021 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the 'License');
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an 'AS IS' BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+from acts_contrib.test_utils.gnss.GnssBlankingBase import GnssBlankingBase
+
+
+class GnssBlankingThTest(GnssBlankingBase):
+    """ LAB GNSS Cellular Coex Tx Power Sweep TTFF/FFPE Tests"""
+
+    def gnss_wwan_blanking_sweep_base(self):
+        """
+        GNSS WWAN blanking cellular power sweep base function
+        """
+        # Get parameters from user params.
+        first_wait = self.user_params.get('first_wait', 300)
+
+        # Start the test item with gnss_init_power_setting.
+        if self.gnss_init_power_setting(first_wait):
+            self.log.info('Successfully set the GNSS power level to %d' %
+                          self.sa_sensitivity)
+            self.log.info('Start searching for cellular power level threshold')
+            # After the GNSS power initialization is done, start the cellular power sweep.
+            self.result_cell_pwr = self.cell_power_sweep()
+
+    def test_gnss_gsm850_sweep(self):
+        """
+        GNSS WWAN blanking cellular power sweep GSM850, Ch190.
+        """
+        self.eecoex_func = 'CELLR,2,850,190,1,1,{}'
+        self.start_pwr = self.gsm_sweep_params[0]
+        self.stop_pwr = self.gsm_sweep_params[1]
+        self.offset = self.gsm_sweep_params[2]
+        self.gnss_wwan_blanking_sweep_base()
+
+    def test_gnss_gsm900_sweep(self):
+        """
+        GNSS WWAN blanking cellular power sweep GSM900, Ch20.
+        """
+        self.eecoex_func = 'CELLR,2,900,20,1,1,{}'
+        self.start_pwr = self.gsm_sweep_params[0]
+        self.stop_pwr = self.gsm_sweep_params[1]
+        self.offset = self.gsm_sweep_params[2]
+        self.gnss_wwan_blanking_sweep_base()
+
+    def test_gnss_gsm1800_sweep(self):
+        """
+        GNSS WWAN blanking cellular power sweep GSM1800, Ch699.
+        """
+        self.eecoex_func = 'CELLR,2,1800,699,1,1,{}'
+        self.start_pwr = self.gsm_sweep_params[0]
+        self.stop_pwr = self.gsm_sweep_params[1]
+        self.offset = self.gsm_sweep_params[2]
+        self.gnss_wwan_blanking_sweep_base()
+
+    def test_gnss_gsm1900_sweep(self):
+        """
+        GNSS WWAN blanking cellular power sweep GSM1900, Ch661.
+        """
+        self.eecoex_func = 'CELLR,2,1900,661,1,1,{}'
+        self.start_pwr = self.gsm_sweep_params[0]
+        self.stop_pwr = self.gsm_sweep_params[1]
+        self.offset = self.gsm_sweep_params[2]
+        self.gnss_wwan_blanking_sweep_base()
+
+    def test_gnss_lte_b38_sweep(self):
+        """
+        GNSS WWAN blanking cellular power sweep LTE-TDD, B38, 10M, 12RB@0, Ch38000.
+        """
+        self.eecoex_func = 'CELLR,5,38,38000,true,PRIMARY,{},10MHz,0,12'
+        self.start_pwr = self.lte_tdd_pc3_sweep_params[0]
+        self.stop_pwr = self.lte_tdd_pc3_sweep_params[1]
+        self.offset = self.lte_tdd_pc3_sweep_params[2]
+        self.gnss_wwan_blanking_sweep_base()
+
+    def test_gnss_lte_b39_sweep(self):
+        """
+        GNSS WWAN blanking cellular power sweep LTE-TDD, B39, 10M, 12RB@0, Ch38450.
+        """
+        self.eecoex_func = 'CELLR,5,39,38450,true,PRIMARY,{},10MHz,0,12'
+        self.start_pwr = self.lte_tdd_pc3_sweep_params[0]
+        self.stop_pwr = self.lte_tdd_pc3_sweep_params[1]
+        self.offset = self.lte_tdd_pc3_sweep_params[2]
+        self.gnss_wwan_blanking_sweep_base()
+
+    def test_gnss_lte_b40_sweep(self):
+        """
+        GNSS WWAN blanking cellular power sweep LTE-TDD, B40, 10M, 12RB@0, Ch39150.
+        """
+        self.eecoex_func = 'CELLR,5,40,39150,true,PRIMARY,{},10MHz,0,12'
+        self.start_pwr = self.lte_tdd_pc3_sweep_params[0]
+        self.stop_pwr = self.lte_tdd_pc3_sweep_params[1]
+        self.offset = self.lte_tdd_pc3_sweep_params[2]
+        self.gnss_wwan_blanking_sweep_base()
+
+    def test_gnss_lte_b41_sweep(self):
+        """
+        GNSS WWAN blanking cellular power sweep LTE-TDD, B41, 10M, 12RB@0, Ch40620.
+        """
+        self.eecoex_func = 'CELLR,5,41,40620,true,PRIMARY,{},10MHz,0,12'
+        self.start_pwr = self.lte_tdd_pc3_sweep_params[0]
+        self.stop_pwr = self.lte_tdd_pc3_sweep_params[1]
+        self.offset = self.lte_tdd_pc3_sweep_params[2]
+        self.gnss_wwan_blanking_sweep_base()
+
+    def test_gnss_lte_b42_sweep(self):
+        """
+        GNSS WWAN blanking cellular power sweep LTE-TDD, B42, 10M, 12RB@0, Ch42590.
+        """
+        self.eecoex_func = 'CELLR,5,42,42590,true,PRIMARY,{},10MHz,0,12'
+        self.start_pwr = self.lte_tdd_pc3_sweep_params[0]
+        self.stop_pwr = self.lte_tdd_pc3_sweep_params[1]
+        self.offset = self.lte_tdd_pc3_sweep_params[2]
+        self.gnss_wwan_blanking_sweep_base()
+
+    def test_gnss_lte_b48_sweep(self):
+        """
+        GNSS WWAN blanking cellular power sweep LTE-TDD, B48, 10M, 12RB@0, Ch55990.
+        """
+        self.eecoex_func = 'CELLR,5,48,55990,true,PRIMARY,{},10MHz,0,12'
+        self.start_pwr = self.lte_tdd_pc3_sweep_params[0]
+        self.stop_pwr = self.lte_tdd_pc3_sweep_params[1]
+        self.offset = self.lte_tdd_pc3_sweep_params[2]
+        self.gnss_wwan_blanking_sweep_base()
+
+    def test_gnss_stand_alone_gnss(self):
+        """
+        GNSS stand alone test item.
+        """
+        self.eecoex_func = ''
+        self.start_pwr = 0
+        self.stop_pwr = 0
+        self.offset = 0
+        self.gnss_wwan_blanking_sweep_base()
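
Each test above only selects an eecoex command template and sweep bounds; the '{}' placeholder is filled with the cellular TX power by the sweep logic in GnssBlankingBase. The loop below is a hypothetical illustration of that expansion, not the actual cell_power_sweep() implementation, and the sweep direction and bounds are assumptions.

    # Hypothetical expansion of the eecoex_func template over a power sweep;
    # the real cell_power_sweep() in GnssBlankingBase may step differently.
    def sweep_commands(eecoex_func, start_pwr, stop_pwr, offset):
        commands = []
        power = start_pwr
        while power <= stop_pwr:
            commands.append(eecoex_func.format(power))
            power += offset
        return commands

    for cmd in sweep_commands('CELLR,5,38,38000,true,PRIMARY,{},10MHz,0,12',
                              start_pwr=10, stop_pwr=14, offset=2):
        print(cmd)
    # CELLR,5,38,38000,true,PRIMARY,10,10MHz,0,12
    # CELLR,5,38,38000,true,PRIMARY,12,10MHz,0,12
    # CELLR,5,38,38000,true,PRIMARY,14,10MHz,0,12
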
diff --git a/acts_tests/tests/google/gnss/GnssConcurrencyTest.py b/acts_tests/tests/google/gnss/GnssConcurrencyTest.py
index c03ea08..9169f4e 100644
--- a/acts_tests/tests/google/gnss/GnssConcurrencyTest.py
+++ b/acts_tests/tests/google/gnss/GnssConcurrencyTest.py
@@ -16,9 +16,11 @@
 
 import time
 import datetime
+import re
 from acts import utils
 from acts import signals
 from acts.base_test import BaseTestClass
+from acts.test_decorators import test_tracker_info
 from acts_contrib.test_utils.tel.tel_logging_utils import start_adb_tcpdump
 from acts_contrib.test_utils.tel.tel_logging_utils import stop_adb_tcpdump
 from acts_contrib.test_utils.tel.tel_logging_utils import get_tcpdump_log
@@ -39,12 +41,17 @@
         self.ad = self.android_devices[0]
         req_params = [
             "standalone_cs_criteria", "chre_tolerate_rate", "qdsp6m_path",
-            "outlier_criteria", "max_outliers"
+            "outlier_criteria", "max_outliers", "pixel_lab_location",
+            "max_interval", "onchip_interval"
         ]
         self.unpack_userparams(req_param_names=req_params)
         gutils._init_device(self.ad)
+        self.ad.adb.shell("setprop persist.vendor.radio.adb_log_on 0")
+        self.ad.adb.shell("sync")
+        gutils.reboot(self.ad)
 
     def setup_test(self):
+        gutils.clear_logd_gnss_qxdm_log(self.ad)
         gutils.start_pixel_logger(self.ad)
         start_adb_tcpdump(self.ad)
         # related properties
@@ -61,6 +68,11 @@
         gutils.get_gnss_qxdm_log(self.ad, self.qdsp6m_path)
         get_tcpdump_log(self.ad, test_name, begin_time)
 
+    def is_brcm_test(self):
+        """ Check the test is for BRCM and skip if not. """
+        if gutils.check_chipset_vendor_by_qualcomm(self.ad):
+            raise signals.TestSkip("Not BRCM chipset. Skip the test.")
+
     def load_chre_nanoapp(self):
         """ Load CHRE nanoapp to target Android Device. """
         for _ in range(0, 3):
@@ -76,7 +88,7 @@
         else:
             raise signals.TestError("Failed to load CHRE nanoapp")
 
-    def enable_gnss_concurrency(self, freq):
+    def enable_chre(self, freq):
         """ Enable or disable gnss concurrency via nanoapp.
 
         Args:
@@ -90,32 +102,14 @@
             if "ap" not in type:
                 self.ad.adb.shell(" ".join([cmd, type, option]))
 
-    def run_concurrency_test(self, ap_freq, chre_freq, test_time):
-        """ Run the concurrency test with specific sequence.
-
-        Args:
-            ap_freq: int for AP side location request frequency.
-            chre_freq: int forCHRE side location request frequency.
-            test_time: int for test duration.
-        Return: test begin time.
-        """
-        gutils.process_gnss_by_gtw_gpstool(self.ad, self.standalone_cs_criteria)
-        begin_time = utils.get_current_epoch_time()
-        gutils.start_gnss_by_gtw_gpstool(self.ad, True, freq=ap_freq)
-        self.enable_gnss_concurrency(chre_freq)
-        time.sleep(test_time)
-        self.enable_gnss_concurrency(0)
-        gutils.start_gnss_by_gtw_gpstool(self.ad, False)
-        return begin_time
-
     def parse_concurrency_result(self, begin_time, type, criteria):
         """ Parse the test result with given time and criteria.
 
         Args:
             begin_time: test begin time.
             type: str for location request type.
-            criteria: int for test criteria.
-        Return: List for the failure and outlier loops.
+            criteria: dictionary for test criteria.
+        Return: List for the failure and outlier loops and results.
         """
         results = []
         failures = []
@@ -125,6 +119,9 @@
         start_time = utils.epoch_to_human_time(begin_time)
         start_time = datetime.datetime.strptime(start_time,
                                                 "%m-%d-%Y %H:%M:%S ")
+        if not search_results:
+            raise signals.TestFailure(f"No log entry found for keyword:"
+                                      f"{CONCURRENCY_TYPE[type]}")
         results.append(
             (search_results[0]["datetime_obj"] - start_time).total_seconds())
         samples = len(search_results) - 1
@@ -151,53 +148,193 @@
         self.ad.log.info("TestResult %s_max_time %.2f" %
                          (type, max(results[1:])))
 
-        return outliers, failures
+        return outliers, failures, results
 
-    def execute_gnss_concurrency_test(self, criteria, test_duration):
+    def run_gnss_concurrency_test(self, criteria, test_duration):
         """ Execute GNSS concurrency test steps.
 
         Args:
             criteria: int for test criteria.
             test_duration: int for test duration.
         """
-        failures = {}
-        outliers = {}
-        begin_time = self.run_concurrency_test(criteria["ap_location"],
-                                               criteria["gnss"], test_duration)
-        for type in CONCURRENCY_TYPE.keys():
-            self.ad.log.info("Starting process %s result" % type)
-            outliers[type], failures[type] = self.parse_concurrency_result(
-                begin_time, type, criteria[type])
-        for type in CONCURRENCY_TYPE.keys():
-            if len(failures[type]) > 0:
-                raise signals.TestFailure("Test exceeds criteria: %.2f" %
-                                          criteria[type])
-            elif len(outliers[type]) > self.max_outliers:
-                raise signals.TestFailure("Outliers excceds max amount: %d" %
-                                          len(outliers[type]))
+        begin_time = utils.get_current_epoch_time()
+        self.ad.log.info("Tests Start at %s" %
+                         utils.epoch_to_human_time(begin_time))
+        gutils.start_gnss_by_gtw_gpstool(
+            self.ad, True, freq=criteria["ap_location"])
+        self.enable_chre(criteria["gnss"])
+        time.sleep(test_duration)
+        self.enable_chre(0)
+        gutils.start_gnss_by_gtw_gpstool(self.ad, False)
+        self.validate_location_test_result(begin_time, criteria)
 
-    # Test Cases
+    def run_chre_only_test(self, criteria, test_duration):
+        """ Execute CHRE only test steps.
+
+        Args:
+            criteria: dict of request type to test criteria.
+            test_duration: int for test duration.
+        """
+        begin_time = utils.get_current_epoch_time()
+        self.ad.log.info("Tests Start at %s" %
+                         utils.epoch_to_human_time(begin_time))
+        self.enable_chre(criteria["gnss"])
+        time.sleep(test_duration)
+        self.enable_chre(0)
+        self.validate_location_test_result(begin_time, criteria)
+
+    def validate_location_test_result(self, begin_time, request):
+        """ Validate GNSS concurrency/CHRE test results.
+
+        Args:
+            begin_time: epoch of test begin time.
+            request: dict mapping each request type to its criteria.
+        """
+        results = {}
+        outliers = {}
+        failures = {}
+        failure_log = ""
+        for request_type, criteria in request.items():
+            criteria = criteria if criteria > 1 else 1
+            self.ad.log.info("Starting process %s result" % request_type)
+            outliers[request_type], failures[request_type], results[
+                request_type] = self.parse_concurrency_result(
+                    begin_time, request_type, criteria)
+            if not results[request_type]:
+                failure_log += "[%s] Fail to find location report.\n" % request_type
+            if len(failures[request_type]) > 0:
+                failure_log += "[%s] Test exceeds criteria: %.2f\n" % (
+                    request_type, criteria)
+            if len(outliers[request_type]) > self.max_outliers:
+                failure_log += "[%s] Outliers excceds max amount: %d\n" % (
+                    request_type, len(outliers[request_type]))
+
+        if failure_log:
+            raise signals.TestFailure(failure_log)
+
+    def run_engine_switching_test(self, freq):
+        """ Conduct engine switching test with given frequency.
+
+        Args:
+            freq: a list identify source1/2 frequency [freq1, freq2]
+        """
+        request = {"ap_location": self.max_interval}
+        begin_time = utils.get_current_epoch_time()
+        self.ad.droid.startLocating(freq[0] * 1000, 0)
+        time.sleep(10)
+        for i in range(5):
+            gutils.start_gnss_by_gtw_gpstool(self.ad, True, freq=freq[1])
+            time.sleep(10)
+            gutils.start_gnss_by_gtw_gpstool(self.ad, False)
+        self.ad.droid.stopLocating()
+        self.calculate_position_error(begin_time)
+        self.validate_location_test_result(begin_time, request)
+
+    def calculate_position_error(self, begin_time):
+        """ Calculate the position error for the logcat search results.
+
+        Args:
+            begin_time: test begin time
+        """
+        position_errors = []
+        search_results = self.ad.search_logcat("reportLocation", begin_time)
+        for result in search_results:
+            # search for location like 25.000717,121.455163
+            regex = r"(-?\d{1,5}\.\d{1,10}),\s*(-?\d{1,5}\.\d{1,10})"
+            match = re.search(regex, result["log_message"])
+            if not match:
+                raise ValueError("lat/lon not found. "
+                                 f"original text: {result['log_message']}")
+            lat = float(match.group(1))
+            lon = float(match.group(2))
+            pe = gutils.calculate_position_error(lat, lon,
+                                                 self.pixel_lab_location)
+            position_errors.append(pe)
+        self.ad.log.info("TestResult max_position_error %.2f" %
+                         max(position_errors))
+
+    # Concurrency Test Cases
+    @test_tracker_info(uuid="9b0daebf-461e-4005-9773-d5d10aaeaaa4")
     def test_gnss_concurrency_ct1(self):
         test_duration = 15
         criteria = {"ap_location": 1, "gnss": 1, "gnss_meas": 1}
-        self.execute_gnss_concurrency_test(criteria, test_duration)
+        self.run_gnss_concurrency_test(criteria, test_duration)
 
+    @test_tracker_info(uuid="f423db2f-12a0-4858-b66f-99e7ca6010c3")
     def test_gnss_concurrency_ct2(self):
         test_duration = 30
         criteria = {"ap_location": 1, "gnss": 8, "gnss_meas": 8}
-        self.execute_gnss_concurrency_test(criteria, test_duration)
+        self.run_gnss_concurrency_test(criteria, test_duration)
 
+    @test_tracker_info(uuid="f72d2df0-f70a-4a11-9f68-2a38f6974454")
     def test_gnss_concurrency_ct3(self):
         test_duration = 60
         criteria = {"ap_location": 15, "gnss": 8, "gnss_meas": 8}
-        self.execute_gnss_concurrency_test(criteria, test_duration)
+        self.run_gnss_concurrency_test(criteria, test_duration)
 
+    @test_tracker_info(uuid="8e5563fd-afcd-40d3-9392-7fc0d10f49da")
     def test_gnss_concurrency_aoc1(self):
         test_duration = 120
         criteria = {"ap_location": 61, "gnss": 1, "gnss_meas": 1}
-        self.execute_gnss_concurrency_test(criteria, test_duration)
+        self.run_gnss_concurrency_test(criteria, test_duration)
 
+    @test_tracker_info(uuid="fb258565-6ac8-4bf7-a554-01d63fc4ef54")
     def test_gnss_concurrency_aoc2(self):
         test_duration = 120
         criteria = {"ap_location": 61, "gnss": 10, "gnss_meas": 10}
-        self.execute_gnss_concurrency_test(criteria, test_duration)
+        self.run_gnss_concurrency_test(criteria, test_duration)
+
+    # CHRE Only Test Cases
+    @test_tracker_info(uuid="cb85fa60-9f1a-4957-b5e3-0f2e5db70b47")
+    def test_gnss_chre1(self):
+        test_duration = 15
+        criteria = {"gnss": 1, "gnss_meas": 1}
+        self.run_chre_only_test(criteria, test_duration)
+
+    @test_tracker_info(uuid="6ab17866-0d0e-4d9e-b3af-441d9db0e324")
+    def test_gnss_chre2(self):
+        test_duration = 30
+        criteria = {"gnss": 8, "gnss_meas": 8}
+        self.run_chre_only_test(criteria, test_duration)
+
+    # Interval tests
+    @test_tracker_info(uuid="53b161e5-335e-44a7-ae2e-eae7464a2b37")
+    def test_variable_interval_via_chre(self):
+        test_duration = 10
+        intervals = [{
+            "gnss": 0.1,
+            "gnss_meas": 0.1
+        }, {
+            "gnss": 0.5,
+            "gnss_meas": 0.5
+        }, {
+            "gnss": 1.5,
+            "gnss_meas": 1.5
+        }]
+        for interval in intervals:
+            self.run_chre_only_test(interval, test_duration)
+
+    @test_tracker_info(uuid="ee0a46fe-aa5f-4dfd-9cb7-d4924f9e9cea")
+    def test_variable_interval_via_framework(self):
+        test_duration = 10
+        intervals = [0, 0.5, 1.5]
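+        # Intervals below 1 second are validated against a 1-second criterion.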
+        for interval in intervals:
+            begin_time = utils.get_current_epoch_time()
+            self.ad.droid.startLocating(interval * 1000, 0)
+            time.sleep(test_duration)
+            self.ad.droid.stopLocating()
+            criteria = interval if interval > 1 else 1
+            self.parse_concurrency_result(begin_time, "ap_location", criteria)
+
+    # Engine switching test
+    @test_tracker_info(uuid="8b42bcb2-cb8c-4ef9-bd98-4fb74a521224")
+    def test_gps_engine_switching_host_to_onchip(self):
+        self.is_brcm_test()
+        freq = [1, self.onchip_interval]
+        self.run_engine_switching_test(freq)
+
+    @test_tracker_info(uuid="636041dc-2bd6-4854-aa5d-61c87943d99c")
+    def test_gps_engine_switching_onchip_to_host(self):
+        self.is_brcm_test()
+        freq = [self.onchip_interval, 1]
+        self.run_engine_switching_test(freq)
diff --git a/acts_tests/tests/google/gnss/GnssFunctionTest.py b/acts_tests/tests/google/gnss/GnssFunctionTest.py
index 706acca..d45a997 100644
--- a/acts_tests/tests/google/gnss/GnssFunctionTest.py
+++ b/acts_tests/tests/google/gnss/GnssFunctionTest.py
@@ -19,7 +19,6 @@
 import fnmatch
 from multiprocessing import Process
 
-from acts import utils
 from acts import asserts
 from acts import signals
 from acts.base_test import BaseTestClass
@@ -29,18 +28,13 @@
 from acts_contrib.test_utils.gnss import gnss_test_utils as gutils
 from acts.utils import get_current_epoch_time
 from acts.utils import unzip_maintain_permissions
-from acts.utils import force_airplane_mode
 from acts_contrib.test_utils.wifi.wifi_test_utils import wifi_toggle_state
-from acts_contrib.test_utils.tel.tel_test_utils import flash_radio
+from acts_contrib.test_utils.tel.tel_bootloader_utils import flash_radio
 from acts_contrib.test_utils.tel.tel_test_utils import verify_internet_connection
-from acts_contrib.test_utils.tel.tel_test_utils import abort_all_tests
-from acts_contrib.test_utils.tel.tel_test_utils import stop_qxdm_logger
 from acts_contrib.test_utils.tel.tel_test_utils import check_call_state_connected_by_adb
 from acts_contrib.test_utils.tel.tel_voice_utils import initiate_call
 from acts_contrib.test_utils.tel.tel_voice_utils import hangup_call
-from acts_contrib.test_utils.tel.tel_test_utils import http_file_download_by_sl4a
-from acts_contrib.test_utils.tel.tel_test_utils import start_qxdm_logger
-from acts_contrib.test_utils.tel.tel_test_utils import trigger_modem_crash
+from acts_contrib.test_utils.tel.tel_data_utils import http_file_download_by_sl4a
 from acts_contrib.test_utils.gnss.gnss_test_utils import get_baseband_and_gms_version
 from acts_contrib.test_utils.gnss.gnss_test_utils import set_attenuator_gnss_signal
 from acts_contrib.test_utils.gnss.gnss_test_utils import _init_device
@@ -63,7 +57,6 @@
 from acts_contrib.test_utils.gnss.gnss_test_utils import check_ttff_data
 from acts_contrib.test_utils.gnss.gnss_test_utils import start_youtube_video
 from acts_contrib.test_utils.gnss.gnss_test_utils import fastboot_factory_reset
-from acts_contrib.test_utils.gnss.gnss_test_utils import gnss_trigger_modem_ssr_by_adb
 from acts_contrib.test_utils.gnss.gnss_test_utils import gnss_trigger_modem_ssr_by_mds
 from acts_contrib.test_utils.gnss.gnss_test_utils import disable_supl_mode
 from acts_contrib.test_utils.gnss.gnss_test_utils import connect_to_wifi_network
@@ -73,9 +66,13 @@
 from acts_contrib.test_utils.gnss.gnss_test_utils import enable_supl_mode
 from acts_contrib.test_utils.gnss.gnss_test_utils import start_toggle_gnss_by_gtw_gpstool
 from acts_contrib.test_utils.gnss.gnss_test_utils import grant_location_permission
-from acts_contrib.test_utils.tel.tel_test_utils import start_adb_tcpdump
-from acts_contrib.test_utils.tel.tel_test_utils import stop_adb_tcpdump
-from acts_contrib.test_utils.tel.tel_test_utils import get_tcpdump_log
+from acts_contrib.test_utils.gnss.gnss_test_utils import is_mobile_data_on
+from acts_contrib.test_utils.gnss.gnss_test_utils import is_wearable_btwifi
+from acts_contrib.test_utils.gnss.gnss_test_utils import delete_lto_file
+from acts_contrib.test_utils.gnss.gnss_test_utils import is_device_wearable
+from acts_contrib.test_utils.tel.tel_logging_utils import start_adb_tcpdump
+from acts_contrib.test_utils.tel.tel_logging_utils import stop_adb_tcpdump
+from acts_contrib.test_utils.tel.tel_logging_utils import get_tcpdump_log
 
 
 class GnssFunctionTest(BaseTestClass):
@@ -94,13 +91,14 @@
                       "weak_signal_xtra_cs_criteria",
                       "weak_signal_xtra_ws_criteria",
                       "weak_signal_xtra_hs_criteria",
+                      "wearable_reboot_hs_criteria",
                       "default_gnss_signal_attenuation",
                       "weak_gnss_signal_attenuation",
                       "no_gnss_signal_attenuation", "gnss_init_error_list",
                       "gnss_init_error_allowlist", "pixel_lab_location",
-                      "legacy_wifi_xtra_cs_criteria", "legacy_projects",
                       "qdsp6m_path", "supl_capabilities", "ttff_test_cycle",
-                      "collect_logs", "dpo_threshold"]
+                      "collect_logs", "dpo_threshold",
+                      "brcm_error_log_allowlist"]
         self.unpack_userparams(req_param_names=req_params)
         # create hashmap for SSID
         self.ssid_map = {}
@@ -109,11 +107,8 @@
             self.ssid_map[SSID] = network
         self.ttff_mode = {"cs": "Cold Start",
                           "ws": "Warm Start",
-                          "hs": "Hot Start"}
-        if self.ad.model in self.legacy_projects:
-            self.wifi_xtra_cs_criteria = self.legacy_wifi_xtra_cs_criteria
-        else:
-            self.wifi_xtra_cs_criteria = self.xtra_cs_criteria
+                          "hs": "Hot Start",
+                          "csa": "CSWith Assist"}
         if self.collect_logs and \
             gutils.check_chipset_vendor_by_qualcomm(self.ad):
             self.flash_new_radio_or_mbn()
@@ -126,6 +121,11 @@
             clear_logd_gnss_qxdm_log(self.ad)
             set_attenuator_gnss_signal(self.ad, self.attenuators,
                                        self.default_gnss_signal_attenuation)
+        # TODO (b/202101058:chenstanley): Need to double check how to disable wifi successfully in wearable projects.
+        if is_wearable_btwifi(self.ad):
+            wifi_toggle_state(self.ad, True)
+            connect_to_wifi_network(
+                self.ad, self.ssid_map[self.pixel_lab_network[0]["SSID"]])
         if not verify_internet_connection(self.ad.log, self.ad, retries=3,
                                           expected_state=True):
             raise signals.TestFailure("Fail to connect to LTE network.")
@@ -136,18 +136,23 @@
             stop_adb_tcpdump(self.ad)
             set_attenuator_gnss_signal(self.ad, self.attenuators,
                                        self.default_gnss_signal_attenuation)
-        if check_call_state_connected_by_adb(self.ad):
-            hangup_call(self.ad.log, self.ad)
-        if int(self.ad.adb.shell("settings get global airplane_mode_on")) != 0:
+        # TODO(chenstanley): sim structure issue
+        if not is_device_wearable(self.ad):
+            if check_call_state_connected_by_adb(self.ad):
+                hangup_call(self.ad.log, self.ad)
+        if self.ad.droid.connectivityCheckAirplaneMode():
             self.ad.log.info("Force airplane mode off")
-            force_airplane_mode(self.ad, False)
-        if self.ad.droid.wifiCheckState():
+            self.ad.droid.connectivityToggleAirplaneMode(False)
+        if not is_wearable_btwifi(self.ad) and self.ad.droid.wifiCheckState():
             wifi_toggle_state(self.ad, False)
-        if int(self.ad.adb.shell("settings get global mobile_data")) != 1:
+        if not is_mobile_data_on(self.ad):
             set_mobile_data(self.ad, True)
         if int(self.ad.adb.shell(
             "settings get global wifi_scan_always_enabled")) != 1:
             set_wifi_and_bt_scanning(self.ad, True)
+        if not verify_internet_connection(self.ad.log, self.ad, retries=3,
+                                          expected_state=True):
+            raise signals.TestFailure("Fail to connect to LTE network.")
 
     def on_fail(self, test_name, begin_time):
         if self.collect_logs:
@@ -287,7 +292,7 @@
         """
         self.start_qxdm_and_tcpdump_log()
         self.ad.log.info("Turn airplane mode on")
-        force_airplane_mode(self.ad, True)
+        self.ad.droid.connectivityToggleAirplaneMode(True)
         self.run_ttff_via_gtw_gpstool(mode, criteria)
 
     def supl_ttff_weak_gnss_signal(self, mode, criteria):
@@ -337,12 +342,37 @@
         disable_supl_mode(self.ad)
         self.start_qxdm_and_tcpdump_log()
         self.ad.log.info("Turn airplane mode on")
-        force_airplane_mode(self.ad, True)
+        self.ad.droid.connectivityToggleAirplaneMode(True)
         wifi_toggle_state(self.ad, True)
         connect_to_wifi_network(
             self.ad, self.ssid_map[self.pixel_lab_network[0]["SSID"]])
         self.run_ttff_via_gtw_gpstool(mode, criteria)
 
+    def ttff_with_assist(self, mode, criteria):
+        """Verify CS/WS TTFF functionality with Assist data.
+
+        Args:
+            mode: "csa" or "ws"
+            criteria: Criteria for the test.
+        """
+        disable_supl_mode(self.ad)
+        begin_time = get_current_epoch_time()
+        process_gnss_by_gtw_gpstool(
+            self.ad, self.standalone_cs_criteria)
+        check_xtra_download(self.ad, begin_time)
+        self.ad.log.info("Turn airplane mode on")
+        self.ad.droid.connectivityToggleAirplaneMode(True)
+        start_gnss_by_gtw_gpstool(self.ad, True)
+        start_ttff_by_gtw_gpstool(
+            self.ad, mode, iteration=self.ttff_test_cycle)
+        ttff_data = process_ttff_by_gtw_gpstool(
+            self.ad, begin_time, self.pixel_lab_location)
+        result = check_ttff_data(
+            self.ad, ttff_data, mode, criteria)
+        asserts.assert_true(
+            result, "TTFF %s fails to reach designated criteria of %d "
+                    "seconds." % (self.ttff_mode.get(mode), criteria))
+
     """ Test Cases """
 
     @test_tracker_info(uuid="ab859f2a-2c95-4d15-bb7f-bd0e3278340f")
@@ -383,26 +413,14 @@
                                       type="gnss",
                                       testtime=tracking_minutes,
                                       meas_flag=True)
-        dpo_results = self.ad.search_logcat("HardwareClockDiscontinuityCount",
-                                            dpo_begin_time)
-        if not dpo_results:
-            raise signals.TestError(
-                "No \"HardwareClockDiscontinuityCount\" is found in logs.")
-        self.ad.log.info(dpo_results[0]["log_message"])
-        self.ad.log.info(dpo_results[-1]["log_message"])
-        first_dpo_count = int(dpo_results[0]["log_message"].split()[-1])
-        final_dpo_count = int(dpo_results[-1]["log_message"].split()[-1])
-        dpo_rate = ((final_dpo_count - first_dpo_count)/(tracking_minutes * 60))
-        dpo_engage_rate = "{percent:.2%}".format(percent=dpo_rate)
-        self.ad.log.info("DPO is ON for %d seconds during %d minutes test." % (
-            final_dpo_count - first_dpo_count, tracking_minutes))
-        self.ad.log.info("TestResult DPO_Engage_Rate " + dpo_engage_rate)
-        threshold = "{percent:.0%}".format(percent=self.dpo_threshold / 100)
-        asserts.assert_true(dpo_rate * 100 > self.dpo_threshold,
-                            "DPO only engaged %s in %d minutes test with "
-                            "threshold %s." % (dpo_engage_rate,
-                                               tracking_minutes,
-                                               threshold))
+        if gutils.check_chipset_vendor_by_qualcomm(self.ad):
+            gutils.check_dpo_rate_via_gnss_meas(self.ad,
+                                                dpo_begin_time,
+                                                self.dpo_threshold)
+        else:
+            gutils.check_dpo_rate_via_brcm_log(self.ad,
+                                               self.dpo_threshold,
+                                               self.brcm_error_log_allowlist)
 
     @test_tracker_info(uuid="499d2091-640a-4735-9c58-de67370e4421")
     def test_gnss_init_error(self):
@@ -936,7 +954,7 @@
             All SUPL TTFF Cold Start results should be within supl_cs_criteria.
         """
         for times in range(1, 4):
-            fastboot_factory_reset(self.ad)
+            fastboot_factory_reset(self.ad, True)
             self.ad.unlock_screen(password=None)
             _init_device(self.ad)
             begin_time = get_current_epoch_time()
@@ -1052,9 +1070,9 @@
 
         Expected Results:
             XTRA/LTO TTFF Cold Start results should be within
-            wifi_xtra_cs_criteria.
+            xtra_cs_criteria.
         """
-        self.xtra_ttff_wifi("cs", self.wifi_xtra_cs_criteria)
+        self.xtra_ttff_wifi("cs", self.xtra_cs_criteria)
 
     @test_tracker_info(uuid="f6e79b31-99d5-49ca-974f-4543957ea449")
     def test_xtra_ttff_ws_wifi(self):
@@ -1171,7 +1189,7 @@
         disable_supl_mode(self.ad)
         self.start_qxdm_and_tcpdump_log()
         self.ad.log.info("Turn airplane mode on")
-        force_airplane_mode(self.ad, True)
+        self.ad.droid.connectivityToggleAirplaneMode(True)
         wifi_toggle_state(self.ad, True)
         connect_to_wifi_network(
             self.ad, self.ssid_map[self.pixel_lab_network[0]["SSID"]])
@@ -1283,12 +1301,80 @@
         start_gnss_by_gtw_gpstool(self.ad, False)
         for test_loop in range(1, 11):
             reboot(self.ad)
-            test_result = process_gnss_by_gtw_gpstool(
-                self.ad, self.supl_hs_criteria, clear_data=False)
+            self.start_qxdm_and_tcpdump_log()
+            if is_device_wearable(self.ad):
+                test_result = process_gnss_by_gtw_gpstool(
+                    self.ad, self.wearable_reboot_hs_criteria, clear_data=False)
+            else:
+                test_result = process_gnss_by_gtw_gpstool(
+                    self.ad, self.supl_hs_criteria, clear_data=False)
             start_gnss_by_gtw_gpstool(self.ad, False)
             self.ad.log.info("Iteration %d => %s" % (test_loop, test_result))
             overall_test_result.append(test_result)
+            gutils.stop_pixel_logger(self.ad)
+            stop_adb_tcpdump(self.ad)
         pass_rate = overall_test_result.count(True)/len(overall_test_result)
         self.ad.log.info("TestResult Pass_rate %s" % format(pass_rate, ".0%"))
         asserts.assert_true(all(overall_test_result),
                             "GNSS init fail after reboot.")
+
+    @test_tracker_info(uuid="2c62183a-4354-4efc-92f2-84580cbd3398")
+    def test_lto_download_after_reboot(self):
+        """Verify LTO data could be downloaded and injected after device reboot.
+
+        Steps:
+            1. Reboot device.
+            2. Verify whether LTO is auto downloaded and injected without triggering GPS.
+            3. Repeat Step 1 to Step 2 for 5 times.
+
+        Expected Results:
+            LTO data is properly downloaded and injected the first time the device is tethered to the phone.
+        """
+        reboot_lto_test_results_all = []
+        disable_supl_mode(self.ad)
+        for times in range(1, 6):
+            delete_lto_file(self.ad)
+            reboot(self.ad)
+            self.start_qxdm_and_tcpdump_log()
+            # Wait 20 seconds for boot busy and lto auto-download time
+            time.sleep(20)
+            begin_time = get_current_epoch_time()
+            reboot_lto_test_result = gutils.check_xtra_download(self.ad, begin_time)
+            self.ad.log.info("Iteration %d => %s" % (times, reboot_lto_test_result))
+            reboot_lto_test_results_all.append(reboot_lto_test_result)
+            gutils.stop_pixel_logger(self.ad)
+            tutils.stop_adb_tcpdump(self.ad)
+        asserts.assert_true(all(reboot_lto_test_results_all),
+                                "Fail to Download and Inject LTO File.")
+
+    @test_tracker_info(uuid="a7048a4f-8a40-40a4-bb6c-7fc90e8227bd")
+    def test_ws_with_assist(self):
+        """Verify Warm Start functionality with existed LTO data.
+
+        Steps:
+            1. Disable SUPL mode.
+            2. Make sure LTO is downloaded.
+            3. Turn on AirPlane mode to make sure there's no network connection.
+            4. TTFF Warm Start with Assist for 10 iterations.
+
+        Expected Results:
+            All TTFF Warm Start with Assist results should be within
+            xtra_ws_criteria.
+        """
+        self.ttff_with_assist("ws", self.xtra_ws_criteria)
+
+    @test_tracker_info(uuid="c5fb9519-63b0-42bd-bd79-fce7593604ea")
+    def test_cs_with_assist(self):
+        """Verify Cold Start functionality with existed LTO data.
+
+        Steps:
+            1. Disable SUPL mode.
+            2. Make sure LTO is downloaded.
+            3. Turn on AirPlane mode to make sure there's no network connection.
+            4. TTFF Cold Start with Assist for 10 iterations.
+
+        Expected Results:
+            All TTFF Cold Start with Assist results should be within
+            standalone_cs_criteria.
+        """
+        self.ttff_with_assist("csa", self.standalone_cs_criteria)
diff --git a/acts_tests/tests/google/gnss/GnssHsSenTest.py b/acts_tests/tests/google/gnss/GnssHsSenTest.py
new file mode 100644
index 0000000..5269ae0
--- /dev/null
+++ b/acts_tests/tests/google/gnss/GnssHsSenTest.py
@@ -0,0 +1,180 @@
+#!/usr/bin/env python3
+#
+#   Copyright 2021 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the 'License');
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an 'AS IS' BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+import os
+from acts_contrib.test_utils.gnss.GnssBlankingBase import GnssBlankingBase
+from acts_contrib.test_utils.gnss.dut_log_test_utils import get_gpstool_logs
+from acts_contrib.test_utils.gnss.gnss_test_utils import excute_eecoexer_function
+
+
+class GnssHsSenTest(GnssBlankingBase):
+    """ LAB GNSS Cellular coex hot start sensitivity search"""
+
+    def __init__(self, controllers):
+        super().__init__(controllers)
+        self.gnss_simulator_power_level = -130
+        self.sa_sensitivity = -150
+        self.gnss_pwr_lvl_offset = 5
+
+    def gnss_hot_start_sensitivity_search_base(self, cellular_enable=False):
+        """
+        Perform GNSS hot start sensitivity search.
+
+        Args:
+            cellular_enable: bool, True if a Tx cellular signal is required.
+                Defaults to False.
+        """
+        # Get parameters from user_params.
+        first_wait = self.user_params.get('first_wait', 300)
+        wait_between_pwr = self.user_params.get('wait_between_pwr', 60)
+        gnss_pwr_sweep = self.user_params.get('gnss_pwr_sweep')
+        gnss_init_pwr = gnss_pwr_sweep.get('init')
+        self.gnss_simulator_power_level = gnss_init_pwr[0]
+        self.sa_sensitivity = gnss_init_pwr[1]
+        self.gnss_pwr_lvl_offset = gnss_init_pwr[2]
+        gnss_pwr_fine_sweep = gnss_pwr_sweep.get('fine_sweep')
+        ttff_iteration = self.user_params.get('ttff_iteration', 25)
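+        # Each gnss_pwr_fine_sweep entry is a (start, stop, offset) power triple for one sweep segment.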
+
+        # Start the test item with gnss_init_power_setting.
+        if self.gnss_init_power_setting(first_wait):
+            self.log.info('Successfully set the GNSS power level to %d' %
+                          self.sa_sensitivity)
+            # Create gnss log folders for init and cellular sweep
+            gnss_init_log_dir = os.path.join(self.gnss_log_path, 'GNSS_init')
+
+            # Pull all exist GPStool logs into GNSS_init folder
+            get_gpstool_logs(self.dut, gnss_init_log_dir, False)
+
+            if cellular_enable:
+                self.log.info('Start cellular coexistence test.')
+                # Set cellular Tx power level.
+                eecoex_cmd = self.eecoex_func.format('Infinity')
+                eecoex_cmd_file_str = eecoex_cmd.replace(',', '_')
+                excute_eecoexer_function(self.dut, eecoex_cmd)
+            else:
+                self.log.info('Start stand alone test.')
+                eecoex_cmd_file_str = 'Stand_alone'
+
+            for i, gnss_pwr in enumerate(gnss_pwr_fine_sweep):
+                self.log.info('Start fine GNSS power level sweep part %d' %
+                              (i + 1))
+                sweep_start = gnss_pwr[0]
+                sweep_stop = gnss_pwr[1]
+                sweep_offset = gnss_pwr[2]
+                self.log.info(
+                    'The GNSS simulator (start, stop, offset): (%.1f, %.1f, %.1f)'
+                    % (sweep_start, sweep_stop, sweep_offset))
+                result, sensitivity = self.hot_start_gnss_power_sweep(
+                    sweep_start, sweep_stop, sweep_offset, wait_between_pwr,
+                    ttff_iteration, True, eecoex_cmd_file_str)
+                if not result:
+                    break
+            self.log.info('The sensitivity level is: %.1f' % sensitivity)
+
+    def test_hot_start_sensitivity_search(self):
+        """
+        GNSS hot start stand alone sensitivity search.
+        """
+        self.gnss_hot_start_sensitivity_search_base(False)
+
+    def test_hot_start_sensitivity_search_gsm850(self):
+        """
+        GNSS hot start GSM850 Ch190 coexistence sensitivity search.
+        """
+        self.eecoex_func = 'CELLR,2,850,190,1,1,{}'
+        self.log.info('Running GSM850 and GNSS coexistence sensitivity search.')
+        self.gnss_hot_start_sensitivity_search_base(True)
+
+    def test_hot_start_sensitivity_search_gsm900(self):
+        """
+        GNSS hot start GSM900 Ch20 coexistence sensitivity search.
+        """
+        self.eecoex_func = 'CELLR,2,900,20,1,1,{}'
+        self.log.info('Running GSM900 and GNSS coexistence sensitivity search.')
+        self.gnss_hot_start_sensitivity_search_base(True)
+
+    def test_hot_start_sensitivity_search_gsm1800(self):
+        """
+        GNSS hot start GSM1800 Ch699 coexistence sensitivity search.
+        """
+        self.eecoex_func = 'CELLR,2,1800,699,1,1,{}'
+        self.log.info(
+            'Running GSM1800 and GNSS coexistence sensitivity search.')
+        self.gnss_hot_start_sensitivity_search_base(True)
+
+    def test_hot_start_sensitivity_search_gsm1900(self):
+        """
+        GNSS hot start GSM1900 Ch661 coexistence sensitivity search.
+        """
+        self.eecoex_func = 'CELLR,2,1900,661,1,1,{}'
+        self.log.info(
+            'Running GSM1900 and GNSS coexistence sensitivity search.')
+        self.gnss_hot_start_sensitivity_search_base(True)
+
+    def test_hot_start_sensitivity_search_lte_b38(self):
+        """
+        GNSS hot start LTE B38 Ch38000 coexistence sensitivity search.
+        """
+        self.eecoex_func = 'CELLR,5,38,38000,true,PRIMARY,{},10MHz,0,12'
+        self.log.info(
+            'Running LTE B38 and GNSS coexistence sensitivity search.')
+        self.gnss_hot_start_sensitivity_search_base(True)
+
+    def test_hot_start_sensitivity_search_lte_b39(self):
+        """
+        GNSS hot start LTE B39 Ch38450 coexistence sensitivity search.
+        """
+        self.eecoex_func = 'CELLR,5,39,38450,true,PRIMARY,{},10MHz,0,12'
+        self.log.info(
+            'Running LTE B39 and GNSS coexistence sensitivity search.')
+        self.gnss_hot_start_sensitivity_search_base(True)
+
+    def test_hot_start_sensitivity_search_lte_b40(self):
+        """
+        GNSS hot start LTE B40 Ch39150 coexistence sensitivity search.
+        """
+        self.eecoex_func = 'CELLR,5,40,39150,true,PRIMARY,{},10MHz,0,12'
+        self.log.info(
+            'Running LTE B40 and GNSS coexistence sensitivity search.')
+        self.gnss_hot_start_sensitivity_search_base(True)
+
+    def test_hot_start_sensitivity_search_lte_b41(self):
+        """
+        GNSS hot start LTE B41 Ch40620 coexistence sensitivity search.
+        """
+        self.eecoex_func = 'CELLR,5,41,40620,true,PRIMARY,{},10MHz,0,12'
+        self.log.info(
+            'Running LTE B41 and GNSS coexistence sensitivity search.')
+        self.gnss_hot_start_sensitivity_search_base(True)
+
+    def test_hot_start_sensitivity_search_lte_b42(self):
+        """
+        GNSS hot start LTE B42 Ch42590 coexistence sensitivity search.
+        """
+        self.eecoex_func = 'CELLR,5,42,42590,true,PRIMARY,{},10MHz,0,12'
+        self.log.info(
+            'Running LTE B42 and GNSS coexistence sensitivity search.')
+        self.gnss_hot_start_sensitivity_search_base(True)
+
+    def test_hot_start_sensitivity_search_lte_b48(self):
+        """
+        GNSS hot start LTE B48 Ch55990 coexistence sensitivity search.
+        """
+        self.eecoex_func = 'CELLR,5,48,55990,true,PRIMARY,{},10MHz,0,12'
+        self.log.info(
+            'Running LTE B48 and GNSS coexistence sensitivity search.')
+        self.gnss_hot_start_sensitivity_search_base(True)
diff --git a/acts_tests/tests/google/gnss/GnssPowerAGPSTest.py b/acts_tests/tests/google/gnss/GnssPowerAGPSTest.py
index df52d64..15ec3c2 100644
--- a/acts_tests/tests/google/gnss/GnssPowerAGPSTest.py
+++ b/acts_tests/tests/google/gnss/GnssPowerAGPSTest.py
@@ -36,16 +36,17 @@
         self.set_cell_only()
         self.start_gnss_tracking_with_power_data()
 
-    def test_cell_strong_cn_long(self):
-        self.set_cell_only()
-        self.start_gnss_tracking_with_power_data()
-
-    def test_cell_weak_cn_long(self):
-        self.set_attenuation(self.atten_level['weak_signal'])
-        self.set_cell_only()
-        self.start_gnss_tracking_with_power_data()
-
     def test_cell_no_signal(self):
         self.set_attenuation(self.atten_level['no_signal'])
         self.set_cell_only()
         self.start_gnss_tracking_with_power_data(is_signal=False)
+
+    # Long Interval tests
+    def test_cell_strong_cn_long(self):
+        self.set_cell_only()
+        self.start_gnss_tracking_with_power_data(freq=self.interval)
+
+    def test_cell_weak_cn_long(self):
+        self.set_attenuation(self.atten_level['weak_signal'])
+        self.set_cell_only()
+        self.start_gnss_tracking_with_power_data(freq=self.interval)
diff --git a/acts_tests/tests/google/gnss/GnssPowerBasicTest.py b/acts_tests/tests/google/gnss/GnssPowerBasicTest.py
index b74aa96..d4ee545 100644
--- a/acts_tests/tests/google/gnss/GnssPowerBasicTest.py
+++ b/acts_tests/tests/google/gnss/GnssPowerBasicTest.py
@@ -46,7 +46,7 @@
         self.start_gnss_tracking_with_power_data()
 
     # Long Interval tests
-    def test_standalone_DPO_long_strong_cn(self):
+    def test_standalone_DPO_strong_cn_long(self):
         self.start_gnss_tracking_with_power_data(freq=self.interval)
 
     def test_standalone_NDPO_strong_cn_long(self):
diff --git a/acts_tests/tests/google/gnss/GnssPowerFrequecyTest.py b/acts_tests/tests/google/gnss/GnssPowerFrequecyTest.py
new file mode 100644
index 0000000..e24845e
--- /dev/null
+++ b/acts_tests/tests/google/gnss/GnssPowerFrequecyTest.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python3
+#
+#   Copyright 2021 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+from acts import utils
+from acts_contrib.test_utils.power.PowerGTWGnssBaseTest import PowerGTWGnssBaseTest
+
+
+class GnssPowerFrequencyTest(PowerGTWGnssBaseTest):
+    """Gnss Power Low Power Mode Test"""
+
+    # Test cases
+    # L1 only test cases
+    def test_L1_only_strong(self):
+        self.ad.adb.shell('settings put secure location_mode 3')
+        self.set_attenuation(self.atten_level['l1_strong_signal'])
+        self.start_gnss_tracking_with_power_data()
+
+    def test_L1_only_weak(self):
+        self.ad.adb.shell('settings put secure location_mode 3')
+        self.set_attenuation(self.atten_level['l1_weak_signal'])
+        self.start_gnss_tracking_with_power_data()
+
+    # L5 tests
+    def test_L1L5_strong(self):
+        self.ad.adb.shell('settings put secure location_mode 3')
+        self.set_attenuation(self.atten_level['l1l5_strong_signal'])
+        self.start_gnss_tracking_with_power_data()
+
+    def test_L1L5_weak(self):
+        self.ad.adb.shell('settings put secure location_mode 3')
+        self.set_attenuation(self.atten_level['l1l5_weak_signal'])
+        self.start_gnss_tracking_with_power_data()
+
+    def test_L1_weak_L5_strong(self):
+        self.ad.adb.shell('settings put secure location_mode 3')
+        self.set_attenuation(self.atten_level['l1_w_l5_s_signal'])
+        self.start_gnss_tracking_with_power_data()
+
+    def test_L1_strong_L5_weak(self):
+        self.ad.adb.shell('settings put secure location_mode 3')
+        self.set_attenuation(self.atten_level['l1_s_l5_w_signal'])
+        self.start_gnss_tracking_with_power_data()
diff --git a/acts_tests/tests/google/gnss/GnssPowerMeasurementTest.py b/acts_tests/tests/google/gnss/GnssPowerMeasurementTest.py
index 3125263..affd322 100644
--- a/acts_tests/tests/google/gnss/GnssPowerMeasurementTest.py
+++ b/acts_tests/tests/google/gnss/GnssPowerMeasurementTest.py
@@ -31,6 +31,7 @@
         self.start_gnss_tracking_with_power_data(
             mode='standalone', freq=self.meas_interval, meas=True)
 
+    # Long Interval tests
     def test_measurement_DPO_long(self):
         self.start_gnss_tracking_with_power_data(
             mode='standalone', freq=self.interval, meas=True)
diff --git a/acts_tests/tests/google/gnss/GnssSimInventoryTest.py b/acts_tests/tests/google/gnss/GnssSimInventoryTest.py
index cabd82d..801aa85 100644
--- a/acts_tests/tests/google/gnss/GnssSimInventoryTest.py
+++ b/acts_tests/tests/google/gnss/GnssSimInventoryTest.py
@@ -1,6 +1,6 @@
 import time
 import os
-import tempfile
+import re
 
 from acts import utils
 from acts import signals
@@ -15,8 +15,6 @@
     def setup_class(self):
         super().setup_class()
         self.ad = self.android_devices[0]
-        req_params = ["sim_inventory_recipient", "sim_inventory_ldap"]
-        self.unpack_userparams(req_param_names=req_params)
 
     def check_device_status(self):
         if int(self.ad.adb.shell("settings get global airplane_mode_on")) != 0:
@@ -27,34 +25,32 @@
 
     def get_imsi(self):
         self.ad.log.info("Get imsi from netpolicy.xml")
-        tmp_path = tempfile.mkdtemp()
-        self.ad.pull_files("/data/system/netpolicy.xml", tmp_path)
-        netpolicy_path = os.path.join(tmp_path, "netpolicy.xml")
-        with open(netpolicy_path, "r", encoding="utf-8") as file:
-            for line in file.readlines():
-                if "subscriberId" in line:
-                    imsi = line.split(" ")[2].split("=")[-1].strip('"')
-                    return imsi
-        raise signals.TestFailure("Fail to get imsi")
+        try:
+            tmp_imsi = self.ad.adb.shell("cat /data/system/netpolicy.xml")
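+            # The IMSI is the 15-digit subscriberId recorded in netpolicy.xml.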
+            imsi = re.compile(r'(\d{15})').search(tmp_imsi).group(1)
+            return imsi
+        except Exception as e:
+            raise signals.TestFailure("Fail to get imsi : %s" % e)
 
     def get_iccid(self):
         iccid = str(get_iccid_by_adb(self.ad))
         if not isinstance(iccid, int):
             self.ad.log.info("Unable to get iccid via adb. Changed to isub.")
-            iccid = str(self.ad.adb.shell(
-                "dumpsys isub | grep iccid")).split(" ")[4].strip(",")
+            tmp_iccid = self.ad.adb.shell("dumpsys isub | grep iccid")
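+            # The ICCID is the 20-digit value reported by the isub service.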
+            iccid = re.compile(r'(\d{20})').search(tmp_iccid).group(1)
             return iccid
         raise signals.TestFailure("Fail to get iccid")
 
     def test_gnss_sim_inventory(self):
+        sim_inventory_recipient = "0958787507"
         self.check_device_status()
         sms_message = "imsi: %s, iccid: %s, ldap: %s, model: %s, sn: %s" % (
-            self.get_imsi(), self.get_iccid(), self.sim_inventory_ldap,
-            self.ad.model, self.ad.serial)
+            self.get_imsi(), self.get_iccid(), os.getlogin(), self.ad.model,
+            self.ad.serial)
         self.ad.log.info(sms_message)
         try:
             self.ad.log.info("Send SMS by SL4A.")
-            self.ad.droid.smsSendTextMessage(self.sim_inventory_recipient,
+            self.ad.droid.smsSendTextMessage(sim_inventory_recipient,
                                              sms_message, True)
             self.ad.ed.pop_event(EventSmsSentSuccess, 10)
         except Exception as e:
diff --git a/acts_tests/tests/google/gnss/GnssUserBuildBroadcomConfigurationTest.py b/acts_tests/tests/google/gnss/GnssUserBuildBroadcomConfigurationTest.py
new file mode 100644
index 0000000..9f3ccfd
--- /dev/null
+++ b/acts_tests/tests/google/gnss/GnssUserBuildBroadcomConfigurationTest.py
@@ -0,0 +1,419 @@
+"""Make sure the user build configuration is working as expected.
+
+Although we can assume the features are the same between user and userdebug builds,
+the configuration differences between these two builds are not tested.
+
+In this test suite, we modify the gps configuration to be the same as user build
+and check if the setting is working.
+For more details, please refer to : go/p22_user_build_verification
+"""
+import os
+import shutil
+import tempfile
+import time
+
+from acts import asserts
+from acts import signals
+from acts.base_test import BaseTestClass
+from acts.controllers.adb_lib.error import AdbCommandError
+from acts.libs.proc.job import TimeoutError
+from acts.test_decorators import test_tracker_info
+from acts_contrib.test_utils.gnss import gnss_test_utils as gutils
+
+
+class GpsConfig:
+    def __init__(self, ad, name) -> None:
+        self.ad = ad
+        self.name = name
+        self.folder = "/vendor/etc/gnss"
+        self.full_path = os.path.join(self.folder, self.name)
+        self.logenabled = "LogEnabled"
+        self._log_enable = "true"
+        self._log_disable = "false"
+
+    def _change_file_content(self, pattern, target):
+        """Modify file via sed command
+
+        command will be sed -i 's/<pattern>/<target>/g' <file_path>
+        Args:
+            pattern: a string will be used as search pattern
+            target: string that will overwrite the matched result
+        """
+        self.ad.adb.remount()
+        command = f"sed -i s/{pattern}/{target}/g {self.full_path}"
+        self.ad.adb.shell(command)
+
+    def _get_setting_value(self, key):
+        """Get setting value from config file
+
+        command is grep <key> self.full_path
+        Args:
+            key: a string will be used as search pattern
+        Returns:
+            string: grep result ("" for no grep result)
+        """
+        command = f"grep {key} {self.full_path}"
+        result = self.ad.adb.shell(command)
+        return result
+
+    def _adjust_log_enable_setting(self, key, enable):
+        """Enable / Disable in self.full_path by setting key = true / false
+        Args:
+            key: The target will be changed
+            enable: True to enable / False to disable
+        """
+        src = self._log_disable if enable else self._log_enable
+        target = self._log_enable if enable else self._log_disable
+        pattern = f"{key}={src}"
+        target = f"{key}={target}"
+        self._change_file_content(pattern, target)
+        result = self._get_setting_value(key)
+        self.ad.log.debug("%s setting: %s", self.name, result)
+
+    def _check_file_exist(self, file_pattern):
+        """use command ls to check if file/dir exists
+        command ls <file_pattern>
+        Args:
+            file_pattern: A string represents the file or dir
+        Returns:
+            bool: True -> file exists / False -> file doesn't exist
+        """
+        command = f"ls {file_pattern}"
+        try:
+            self.ad.adb.shell(command)
+            result = True
+        except AdbCommandError as e:
+            result = False
+        return result
+
+    def enable_diagnostic_log(self):
+        """Set LogEnabled=true in config file
+        In gps.xml it will be LogEnabled=\"true\"
+        """
+        self.ad.log.info("Enable diagnostic log in %s", self.name)
+        self._adjust_log_enable_setting(key=self.logenabled, enable=True)
+
+    def disable_diagnostic_log(self):
+        """Set LogEnabled=false in config file
+        In gps.xml it will be LogEnabled=\"false\"
+        """
+        self.ad.log.info("Disable diagnostic log in %s", self.name)
+        self._adjust_log_enable_setting(key=self.logenabled, enable=False)
+
+
+class ScdConf(GpsConfig):
+    def __init__(self, ad) -> None:
+        super().__init__(ad, "scd.conf")
+
+
+class GpsXml(GpsConfig):
+    def __init__(self, ad) -> None:
+        super().__init__(ad, "gps.xml")
+        self.supllogenable = "SuplLogEnable"
+        self.supl_log = "/data/vendor/gps/suplflow.txt"
+        self._log_enable = "\\\"true\\\""
+        self._log_disable = "\\\"false\\\""
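+        # gps.xml stores the value in quotes (LogEnabled="true"), so the sed pattern needs escaped quotes.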
+
+    def enable_supl_log(self):
+        """Set SuplLogEnable=\"true\" in gps.xml"""
+        self.ad.log.info("Enable SUPL logs")
+        self._adjust_log_enable_setting(key=self.supllogenable, enable=True)
+
+    def disable_supl_log(self):
+        """Set SuplLogEnable=\"false\" in gps.xml"""
+        self.ad.log.info("Disable SUPL log")
+        self._adjust_log_enable_setting(key=self.supllogenable, enable=False)
+
+    def remove_supl_logs(self):
+        """Remove /data/vendor/gps/suplflow.txt"""
+        self.ad.log.info("Remove SUPL logs")
+        command = f"rm -f {self.supl_log}"
+        self.ad.adb.shell(command)
+
+    def is_supl_log_file_exist(self):
+        """Check if /data/vendor/gps/suplflow.txt exist
+        Returns:
+            bool: True -> supl log exists / False -> supl log doesn't exist
+        """
+        result = self._check_file_exist(self.supl_log)
+        self.ad.log.debug("Supl file exists?: %s", result)
+        return result
+
+
+class LhdConf(GpsConfig):
+    def __init__(self, ad) -> None:
+        super().__init__(ad, "lhd.conf")
+        self.lhefailsafe = "LheFailSafe"
+        self.lheconsole = "LheConsole"
+        self.lheconsole_hub = self.get_lheconsole_value()
+        self.esw_crash_dump_pattern = self.get_esw_crash_dump_pattern()
+        self.ad.log.info(f"here is {self.esw_crash_dump_pattern}")
+
+    def _adjust_lhe_setting(self, key, enable):
+        """Set lhe setting.
+        Enable - uncomment the setting
+        Disable - comment out the setting
+        Args:
+            key: A string will be used as search pattern
+            enable: bool True to enable / False to disable
+        """
+        pattern = f"#\ {key}" if enable else key
+        target = key if enable else f"#\ {key}"
+        self._change_file_content(pattern, target)
+
+    def enable_lhefailsafe(self):
+        """Uncomment out LheFailSafe"""
+        self.ad.log.info("Enable %s", self.lhefailsafe)
+        self._adjust_lhe_setting(key=self.lhefailsafe, enable=True)
+
+    def disable_lhefailsafe(self):
+        """Comment out LheFailSafe"""
+        self.ad.log.info("Disable %s", self.lhefailsafe)
+        self._adjust_lhe_setting(key=self.lhefailsafe, enable=False)
+
+    def enable_lheconsole(self):
+        """Uncomment out LheConsole"""
+        self.ad.log.info("Enable %s", self.lheconsole)
+        self._adjust_lhe_setting(key=self.lheconsole, enable=True)
+
+    def disable_lheconsole(self):
+        """Comment out LheConsole"""
+        self.ad.log.info("Disable %s", self.lheconsole)
+        self._adjust_lhe_setting(key=self.lheconsole, enable=False)
+
+    def get_lhefailsafe_value(self):
+        """Get the LheFailSafe value
+
+        Returns:
+            string: the LheFailSafe value in config
+        Raises:
+            ValueError: No LheFailSafe value
+        """
+        result = self._get_setting_value(self.lhefailsafe)
+        if not result:
+            raise ValueError(("%s should exists in %s", self.lhefailsafe, self.name))
+        result = result.split("=")[1]
+        self.ad.log.debug("%s is %s", self.lhefailsafe, result)
+        return result
+
+    def get_lheconsole_value(self):
+        """Get the LheConsole value
+
+        Returns:
+            string: the LheConsole value in config
+        Raises:
+            ValueError: No LheConsole value
+        """
+        result = self._get_setting_value(self.lheconsole)
+        if not result:
+            raise ValueError(("%s should exists in %s", self.lheconsole, self.name))
+        result = result.split("=")[1]
+        self.ad.log.debug("%s is %s", self.lheconsole, result)
+        return result
+
+    def get_esw_crash_dump_pattern(self):
+        """Get the esw crash dump file pattern
+        The value is set in LheFailSafe, but we need to add a wildcard.
+        Returns:
+            string: esw crash dump pattern
+        Raises:
+            ValueError: No LheFailSafe value
+        """
+        value = self.get_lhefailsafe_value()
+        value = value.replace(".txt", "*.txt")
+        self.ad.log.debug("Dump file pattern is %s", value)
+        return value
+
+    def remove_esw_crash_dump_file(self):
+        """Remove crash dump file"""
+        self.ad.log.info("Remove esw crash file")
+        command = f"rm -f {self.esw_crash_dump_pattern}"
+        self.ad.adb.shell(command)
+
+    def trigger_firmware_crash(self):
+        """Send command to LheConsole to trigger firmware crash"""
+        self.ad.log.info("Trigger firmware crash")
+        command = f"echo Lhe:write=0xFFFFFFFF,4 > {self.lheconsole_hub}.toAsic"
+        self.ad.adb.shell(command, timeout=10)
+
+    def is_esw_crash_dump_file_exist(self):
+        """Check if esw_crash_dump_pattern exists
+        Will try 3 times, 1 second interval for each attempt
+        Returns:
+            bool: True -> file exists / False -> file doesn't exist
+        """
+        for attempt in range(1, 4):
+            result = self._check_file_exist(self.esw_crash_dump_pattern)
+            self.ad.log.debug("(Attempt %s)esw dump file exists?: %s", attempt, result)
+            if result:
+                return result
+            time.sleep(1)
+        return False
+
+
+class GnssUserBuildBroadcomConfigurationTest(BaseTestClass):
+    """ GNSS user build configuration Tests on Broadcom device."""
+    def setup_class(self):
+        super().setup_class()
+        self.ad = self.android_devices[0]
+
+        if not gutils.check_chipset_vendor_by_qualcomm(self.ad):
+            gutils._init_device(self.ad)
+            self.gps_config_path = tempfile.mkdtemp()
+            self.gps_xml = GpsXml(self.ad)
+            self.lhd_conf = LhdConf(self.ad)
+            self.scd_conf = ScdConf(self.ad)
+            self.enable_testing_setting()
+            self.backup_gps_config()
+
+    def teardown_class(self):
+        if hasattr(self, "gps_config_path") and os.path.isdir(self.gps_config_path):
+            shutil.rmtree(self.gps_config_path)
+
+    def setup_test(self):
+        if gutils.check_chipset_vendor_by_qualcomm(self.ad):
+            raise signals.TestSkip("Device is Qualcomm, skip the test")
+        gutils.clear_logd_gnss_qxdm_log(self.ad)
+
+    def teardown_test(self):
+        if not gutils.check_chipset_vendor_by_qualcomm(self.ad):
+            self.revert_gps_config()
+            self.ad.reboot()
+
+    def on_fail(self, test_name, begin_time):
+        self.ad.take_bug_report(test_name, begin_time)
+        gutils.get_gnss_qxdm_log(self.ad)
+
+    def enable_testing_setting(self):
+        """Enable setting to the testing target
+        Before backing up config, enable all the testing target
+        To ensure the teardown_test can bring the device back to the desired state
+        """
+        self.set_gps_logenabled(enable=True)
+        self.gps_xml.enable_supl_log()
+        self.lhd_conf.enable_lheconsole()
+        self.lhd_conf.enable_lhefailsafe()
+
+    def backup_gps_config(self):
+        """Copy the gps config
+
+        config file will be copied: gps.xml / lhd.conf / scd.conf
+        """
+        for conf in [self.gps_xml, self.scd_conf, self.lhd_conf]:
+            self.ad.log.debug("Backup %s", conf.full_path)
+            self.ad.adb.pull(conf.full_path, self.gps_config_path)
+
+    def revert_gps_config(self):
+        """Revert the gps config from the one we backup in the setup_class
+
+        config file will be reverted: gps.xml / lhd.conf / scd.conf
+        """
+        self.ad.adb.remount()
+        for conf in [self.gps_xml, self.scd_conf, self.lhd_conf]:
+            file_path = os.path.join(self.gps_config_path, conf.name)
+            self.ad.log.debug("Revert %s", conf.full_path)
+            self.ad.adb.push(file_path, conf.full_path)
+
+    def run_gps_and_capture_log(self):
+        """Enable GPS via gps tool for 15s and capture pixel log"""
+        gutils.start_pixel_logger(self.ad)
+        gutils.start_gnss_by_gtw_gpstool(self.ad, state=True)
+        time.sleep(15)
+        gutils.start_gnss_by_gtw_gpstool(self.ad, state=False)
+        gutils.stop_pixel_logger(self.ad)
+
+    def set_gps_logenabled(self, enable):
+        """Set LogEnabled in gps.xml / lhd.conf / scd.conf
+
+        Args:
+            enable: True to enable / False to disable
+        """
+        if enable:
+            self.gps_xml.enable_diagnostic_log()
+            self.scd_conf.enable_diagnostic_log()
+            self.lhd_conf.enable_diagnostic_log()
+        else:
+            self.gps_xml.disable_diagnostic_log()
+            self.scd_conf.disable_diagnostic_log()
+            self.lhd_conf.disable_diagnostic_log()
+
+    @test_tracker_info(uuid="1dd68d9c-38b0-4fbc-8635-1228c72872ff")
+    def test_gps_logenabled_setting(self):
+        """Verify the LogEnabled setting in gps.xml / scd.conf / lhd.conf
+        Steps:
+            1. default setting is on in user_debug build
+            2. enable gps for 15s
+            3. assert gps log pattern "slog    :" in pixel logger
+            4. disable LogEnabled in all the gps conf
+            5. enable gps for 15s
+            6. assert no gps log pattern "slog    :" in pixel logger
+        """
+        self.run_gps_and_capture_log()
+        result, _ = gutils.parse_brcm_nmea_log(self.ad, "slog    :", [])
+        asserts.assert_true(bool(result), "LogEnabled is set to true, but no gps log was found")
+
+        self.set_gps_logenabled(enable=False)
+        gutils.clear_logd_gnss_qxdm_log(self.ad)
+
+        self.run_gps_and_capture_log()
+        result, _ = gutils.parse_brcm_nmea_log(self.ad, "slog    :", [])
+        asserts.assert_false(bool(result), ("LogEnabled is set to False but still found %d slog",
+                                            len(result)))
+
+    @test_tracker_info(uuid="152a12e0-7957-47e0-9ea7-14725254fd1d")
+    def test_gps_supllogenable_setting(self):
+        """Verify SuplLogEnable in gps.xml
+        Steps:
+            1. default setting is on in user_debug build
+            2. remove existing supl log
+            3. enable gps for 15s
+            4. supl log should exist
+            5. disable SuplLogEnable in gps.xml
+            6. remove existing supl log
+            7. enable gps for 15s
+            8. supl log should not exist
+        """
+        def is_supl_log_exist_after_supl_request():
+            self.gps_xml.remove_supl_logs()
+            self.run_gps_and_capture_log()
+            return self.gps_xml.is_supl_log_file_exist()
+
+        result = is_supl_log_exist_after_supl_request()
+        asserts.assert_true(result, "SuplLogEnable is enable, should find supl log file")
+
+        self.gps_xml.disable_supl_log()
+        self.ad.reboot()
+
+        result = is_supl_log_exist_after_supl_request()
+        asserts.assert_false(result, "SuplLogEnable is disable, should not find supl log file")
+
+    @test_tracker_info(uuid="892d0037-8c0c-45b6-bd0f-9e4073d37232")
+    def test_lhe_setting(self):
+        """Verify lhefailsafe / lheconsole setting in lhd.conf
+        Steps:
+            1. both setting is enabled
+            2. trigger firmware crash and check if dump file exist
+            3. disable lhefailsafe
+            4. trigger firmware crash and check if dump file exist
+            5. disable lheconsole
+            6. trigger firmware crash and check if command timeout
+        """
+        def is_dump_file_exist_after_firmware_crash():
+            self.lhd_conf.remove_esw_crash_dump_file()
+            self.lhd_conf.trigger_firmware_crash()
+            return self.lhd_conf.is_esw_crash_dump_file_exist()
+
+        result = is_dump_file_exist_after_firmware_crash()
+        asserts.assert_true(result, "LheFailSafe is enabled, but no crash file was found")
+
+        self.lhd_conf.disable_lhefailsafe()
+        self.ad.reboot()
+
+        result = is_dump_file_exist_after_firmware_crash()
+        asserts.assert_false(result, "LheFailSafe is disabled, but still found crash file")
+
+        self.lhd_conf.disable_lheconsole()
+        self.ad.reboot()
+
+        with asserts.assert_raises(TimeoutError):
+            self.lhd_conf.trigger_firmware_crash()
diff --git a/acts_tests/tests/google/gnss/GnssWearableTetherFunctionTest.py b/acts_tests/tests/google/gnss/GnssWearableTetherFunctionTest.py
new file mode 100644
index 0000000..87a233d
--- /dev/null
+++ b/acts_tests/tests/google/gnss/GnssWearableTetherFunctionTest.py
@@ -0,0 +1,272 @@
+#!/usr/bin/env python3
+#
+#   Copyright 2020 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+import time
+import os
+
+from acts import asserts
+from acts import signals
+from acts.base_test import BaseTestClass
+from acts.test_decorators import test_tracker_info
+from acts_contrib.test_utils.gnss import gnss_test_utils as gutils
+from acts_contrib.test_utils.wifi import wifi_test_utils as wutils
+from acts_contrib.test_utils.tel import tel_logging_utils as tutils
+from acts_contrib.test_utils.tel.tel_test_utils import verify_internet_connection
+from acts.utils import get_current_epoch_time
+from acts_contrib.test_utils.gnss.gnss_test_utils import delete_lto_file, pair_to_wearable
+from acts_contrib.test_utils.gnss.gnss_test_utils import process_gnss_by_gtw_gpstool
+from acts_contrib.test_utils.gnss.gnss_test_utils import start_gnss_by_gtw_gpstool
+from acts_contrib.test_utils.gnss.gnss_test_utils import check_tracking_file
+from uiautomator import Device
+
+
+class GnssWearableTetherFunctionTest(BaseTestClass):
+    """ GNSS Wearable Tether Function Tests"""
+    def setup_class(self):
+        super().setup_class()
+        self.watch = self.android_devices[0]
+        self.phone = self.android_devices[1]
+        self.phone.uia = Device(self.phone.serial)
+        req_params = ["pixel_lab_network", "standalone_cs_criteria",
+                      "flp_ttff_max_threshold", "pixel_lab_location",
+                      "flp_ttff_cycle", "default_gnss_signal_attenuation",
+                      "flp_waiting_time", "tracking_test_time",
+                      "fast_start_criteria" ]
+        self.unpack_userparams(req_param_names=req_params)
+        # create hashmap for SSID
+        self.ssid_map = {}
+        for network in self.pixel_lab_network:
+            SSID = network["SSID"]
+            self.ssid_map[SSID] = network
+        self.ttff_mode = {"cs": "Cold Start",
+                          "ws": "Warm Start",
+                          "hs": "Hot Start"}
+        gutils._init_device(self.watch)
+        pair_to_wearable(self.watch, self.phone)
+
+    def setup_test(self):
+        gutils.get_baseband_and_gms_version(self.watch)
+        gutils.clear_logd_gnss_qxdm_log(self.watch)
+        gutils.clear_logd_gnss_qxdm_log(self.phone)
+        gutils.set_attenuator_gnss_signal(self.watch, self.attenuators,
+                                          self.default_gnss_signal_attenuation)
+        if not gutils.is_mobile_data_on(self.watch):
+            gutils.set_mobile_data(self.watch, True)
+        # TODO (b/202101058:chenstanley): Need to double check how to disable wifi successfully in wearable projects.
+        if gutils.is_wearable_btwifi(self.watch):
+            wutils.wifi_toggle_state(self.watch, True)
+            gutils.connect_to_wifi_network(
+                self.watch, self.ssid_map[self.pixel_lab_network[0]["SSID"]])
+        if not verify_internet_connection(self.watch.log, self.watch, retries=3,
+                                          expected_state=True):
+            raise signals.TestFailure("Fail to connect to LTE or WiFi network.")
+        if not gutils.is_bluetooth_connected(self.watch, self.phone):
+            gutils.pair_to_wearable(self.watch, self.phone)
+
+    def teardown_test(self):
+        gutils.stop_pixel_logger(self.watch)
+        tutils.stop_adb_tcpdump(self.watch)
+        gutils.set_attenuator_gnss_signal(self.watch, self.attenuators,
+                                          self.default_gnss_signal_attenuation)
+
+    def on_fail(self, test_name, begin_time):
+        self.watch.take_bug_report(test_name, begin_time)
+        gutils.get_gnss_qxdm_log(self.watch)
+        tutils.get_tcpdump_log(self.watch, test_name, begin_time)
+
+    def start_qxdm_and_tcpdump_log(self):
+        """Start QXDM and adb tcpdump if collect_logs is True."""
+        gutils.start_pixel_logger(self.watch)
+        tutils.start_adb_tcpdump(self.watch)
+
+    def flp_ttff(self, mode, criteria, location):
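+        """Run FLP TTFF on the watch and verify it uses the phone's location.
+
+        Args:
+            mode: TTFF mode, one of "cs", "ws" or "hs".
+            criteria: TTFF pass criteria.
+            location: Reference location used to validate the TTFF results.
+        """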
+        self.start_qxdm_and_tcpdump_log()
+        start_gnss_by_gtw_gpstool(self.phone, True, type="FLP")
+        time.sleep(self.flp_waiting_time)
+        self.watch.unlock_screen(password=None)
+        begin_time = get_current_epoch_time()
+        process_gnss_by_gtw_gpstool(
+            self.watch, self.standalone_cs_criteria, type="flp")
+        gutils.start_ttff_by_gtw_gpstool(
+            self.watch, mode, iteration=self.flp_ttff_cycle)
+        results = gutils.process_ttff_by_gtw_gpstool(
+            self.watch, begin_time, location, type="flp")
+        gutils.check_ttff_data(self.watch, results, mode, criteria)
+        self.check_location_from_phone()
+        start_gnss_by_gtw_gpstool(self.phone, False, type="FLP")
+
+    def check_location_from_phone(self):
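+        """Compare the watch and phone tracking files.
+
+        Returns:
+            True if the watch location matches the phone's FLP location.
+        """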
+        watch_file = check_tracking_file(self.watch)
+        phone_file = check_tracking_file(self.phone)
+        return gutils.compare_watch_phone_location(self, watch_file, phone_file)
+
+    """ Test Cases """
+
+    @test_tracker_info(uuid="2c62183a-4354-4efc-92f2-84580cbd3398")
+    def test_lto_download_after_reboot(self):
+        """Verify LTO data could be downloaded and injected after device reboot.
+
+        Steps:
+            1. Reboot device.
+            2. Verify whether LTO is auto downloaded and injected without triggering GPS.
+            3. Repeat Step 1 to Step 2 for 5 times.
+
+        Expected Results:
+            LTO data is properly downloaded and injected the first time the watch tethers to the phone.
+        """
+        reboot_lto_test_results_all = []
+        gutils.disable_supl_mode(self.watch)
+        for times in range(1, 6):
+            delete_lto_file(self.watch)
+            gutils.reboot(self.watch)
+            self.start_qxdm_and_tcpdump_log()
+            # Wait 20 seconds for boot to settle and LTO auto-download
+            time.sleep(20)
+            begin_time = get_current_epoch_time()
+            reboot_lto_test_result = gutils.check_xtra_download(self.watch, begin_time)
+            self.watch.log.info("Iteration %d => %s" % (times, reboot_lto_test_result))
+            reboot_lto_test_results_all.append(reboot_lto_test_result)
+            gutils.stop_pixel_logger(self.watch)
+            tutils.stop_adb_tcpdump(self.watch)
+        asserts.assert_true(all(reboot_lto_test_results_all),
+                            "Fail to Download and Inject LTO File.")
+
+    @test_tracker_info(uuid="7ed596df-df71-42ca-bdb3-69a3cad81963")
+    def test_flp_ttff_cs(self):
+        """Verify FLP TTFF Cold Start while tether with phone.
+
+        Steps:
+            1. Pair with phone via Bluetooth.
+            2. FLP TTFF Cold Start for 10 iterations.
+            3. Check location source is from Phone.
+
+        Expected Results:
+            1. FLP TTFF Cold Start results should be within
+            flp_ttff_max_threshold.
+            2. Watch uses phone's FLP location.
+        """
+        self.flp_ttff("cs", self.flp_ttff_max_threshold, self.pixel_lab_location)
+
+    @test_tracker_info(uuid="de19617c-1f03-4077-99af-542b300ab4ed")
+    def test_flp_ttff_ws(self):
+        """Verify FLP TTFF Warm Start while tether with phone.
+
+        Steps:
+            1. Pair with phone via Bluetooth.
+            2. FLP TTFF Warm Start for 10 iterations.
+            3. Check location source is from Phone.
+
+        Expected Results:
+            1. FLP TTFF Warm Start results should be within
+            flp_ttff_max_threshold.
+            2. Watch uses phone's FLP location.
+        """
+        self.flp_ttff("ws", self.flp_ttff_max_threshold, self.pixel_lab_location)
+
+    @test_tracker_info(uuid="c58c90ae-9f4a-4619-a9f8-f2f98c930008")
+    def test_flp_ttff_hs(self):
+        """Verify FLP TTFF Hot Start while tether with phone.
+
+        Steps:
+            1. Pair with phone via Bluetooth.
+            2. FLP TTFF Hot Start for 10 iterations.
+            3. Check location source is from Phone.
+
+        Expected Results:
+            1. FLP TTFF Hot Start results should be within
+            flp_ttff_max_threshold.
+            2. Watch uses phone's FLP location.
+        """
+        self.flp_ttff("hs", self.flp_ttff_max_threshold, self.pixel_lab_location)
+
+    @test_tracker_info(uuid="ca955ad3-e2eb-4fde-af2b-3e19abe47792")
+    def test_tracking_during_bt_disconnect_resume(self):
+        """Verify tracking is correct during Bluetooth disconnect and resume.
+
+        Steps:
+            1. Make sure watch Bluetooth is on and in paired status.
+            2. Do 1 min tracking.
+            3. After 1 min tracking, check location source is using phone's FLP.
+            4. Turn off watch Bluetooth, and do 1 min tracking.
+            5. After 1 min tracking, check tracking results.
+            6. Repeat Step 1 to Step 5 for 5 times.
+
+        Expected Results:
+            1. Watch uses phone's FLP location in Bluetooth connect state.
+            2. Tracking results should be within pixel_lab_location criteria.
+        """
+        self.start_qxdm_and_tcpdump_log()
+        for i in range(1, 6):
+            if not self.watch.droid.bluetoothCheckState():
+                self.watch.droid.bluetoothToggleState(True)
+                self.watch.log.info("Turn Bluetooth on")
+                self.watch.log.info("Wait 1 min for Bluetooth auto re-connect")
+                time.sleep(60)
+            if not gutils.is_bluetooth_connect(self.watch, self.phone):
+                raise signals.TestFailure("Fail to connect to device via Bluetooth.")
+            start_gnss_by_gtw_gpstool(self.phone, True, type="FLP")
+            time.sleep(self.flp_waiting_time)
+            start_gnss_by_gtw_gpstool(self.watch, True, type="FLP")
+            time.sleep(self.flp_waiting_time)
+            self.watch.log.info("Wait 1 min for tracking")
+            time.sleep(self.tracking_test_time)
+            if not self.check_location_from_phone():
+                raise signals.TestFailure("Watch is not using phone location")
+            self.watch.droid.bluetoothToggleState(False)
+            self.watch.log.info("Turn off Watch Bluetooth")
+            self.watch.log.info("Wait 1 min for tracking")
+            time.sleep(self.tracking_test_time)
+            if self.check_location_from_phone():
+                raise signals.TestError("Watch should not use phone location")
+            gutils.parse_gtw_gpstool_log(self.watch, self.pixel_lab_location, type="FLP")
+            start_gnss_by_gtw_gpstool(self.phone, False, type="FLP")
+
+    @test_tracker_info(uuid="654a8f1b-f9c6-433e-a21f-59224cce822e")
+    def test_fast_start_first_fix_and_ttff(self):
+        """Verify first fix and TTFF of Fast Start (Warm Start v4) within the criteria
+
+        Steps:
+            1. Pair the watch to the phone during OOBE.
+            2. Ensure the LTO file is downloaded on the watch.
+            3. Ensure UTC time is injected on the watch.
+            4. Enable airplane mode to untether from the phone.
+            5. Open GPSTool to get a first fix with LTO and UTC time injected.
+            6. Repeat Step 1 to Step 5 for 5 times.
+            7. After Step 6, run Warm Start TTFF for 10 iterations.
+
+        Expected Results:
+            1. First fix should be within fast_start_criteria.
+            2. TTFF should be within fast_start_criteria.
+        """
+        for i in range(1, 6):
+            self.watch.log.info("First fix of Fast Start - attempt %s" % i)
+            pair_to_wearable(self.watch, self.phone)
+            gutils.enable_framework_log(self.watch)
+            self.start_qxdm_and_tcpdump_log()
+            begin_time = get_current_epoch_time()
+            gutils.check_xtra_download(self.watch, begin_time)
+            gutils.check_inject_time(self.watch)
+            self.watch.log.info("Turn airplane mode on")
+            self.watch.droid.connectivityToggleAirplaneMode(True)
+            self.watch.unlock_screen(password=None)
+            gutils.process_gnss_by_gtw_gpstool(
+                self.watch, self.fast_start_criteria, clear_data=False)
+        gutils.start_ttff_by_gtw_gpstool(
+            self.watch, ttff_mode="ws", iteration=self.ttff_test_cycle)
+        ttff_data = gutils.process_ttff_by_gtw_gpstool(self.watch, begin_time,
+                                                       self.pixel_lab_location)
+        result = gutils.check_ttff_data(self.watch, ttff_data, self.ttff_mode.get("ws"),
+                                        criteria=self.fast_start_criteria)
+        asserts.assert_true(result, "TTFF fails to reach designated criteria")
diff --git a/acts_tests/tests/google/gnss/LabTtffGeneralCoexTest.py b/acts_tests/tests/google/gnss/LabTtffGeneralCoexTest.py
new file mode 100644
index 0000000..24da4d3
--- /dev/null
+++ b/acts_tests/tests/google/gnss/LabTtffGeneralCoexTest.py
@@ -0,0 +1,96 @@
+#!/usr/bin/env python3
+#
+#   Copyright 2021 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the 'License');
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an 'AS IS' BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+from acts_contrib.test_utils.gnss import LabTtffTestBase as lttb
+from acts_contrib.test_utils.gnss.gnss_test_utils import launch_eecoexer
+from acts_contrib.test_utils.gnss.gnss_test_utils import excute_eecoexer_function
+
+
+class LabTtffGeneralCoexTest(lttb.LabTtffTestBase):
+    """Lab stand alone GNSS general coex TTFF/FFPE test"""
+
+    def setup_class(self):
+        super().setup_class()
+        req_params = ['coex_testcase_ls']
+        self.unpack_userparams(req_param_names=req_params)
+
+    def setup_test(self):
+        super().setup_test()
+        launch_eecoexer(self.dut)
+        # Set the DUT temperature limit to 60 degrees
+        self.dut.adb.shell(
+            'setprop persist.com.google.eecoexer.cellular.temperature_limit 60')
+
+    def exe_eecoexer_loop_cmd(self, cmd_list=list()):
+        """
+        Function for execute EECoexer command list
+            Args:
+                cmd_list: a list of EECoexer function command.
+                Type, list.
+        """
+        for cmd in cmd_list:
+            self.log.info('Execute EEcoexer Command: {}'.format(cmd))
+            excute_eecoexer_function(self.dut, cmd)
+
+    def gnss_ttff_ffpe_coex_base(self, mode):
+        """
+        TTFF and FFPE general coex base test function
+
+            Args:
+                mode: Set the TTFF mode for testing. Definitions are as below.
+                cs(cold start), ws(warm start), hs(hot start)
+        """
+        # Loop all test case in coex_testcase_ls
+        for test_item in self.coex_testcase_ls:
+
+            # get test_log_path from coex_testcase_ls['test_name']
+            test_log_path = test_item['test_name']
+
+            # get test_cmd from coex_testcase_ls['test_cmd']
+            test_cmd = test_item['test_cmd']
+
+            # get stop_cmd from coex_testcase_ls['stop_cmd']
+            stop_cmd = test_item['stop_cmd']
+
+            # Start aggressor Tx by EEcoexer
+            self.exe_eecoexer_loop_cmd(test_cmd)
+
+            # Start GNSS TTFF FFPE testing
+            self.gnss_ttff_ffpe(mode, test_log_path)
+
+            # Stop aggressor Tx by EEcoexer
+            self.exe_eecoexer_loop_cmd(stop_cmd)
+
+            # Clear GTW GPSTool log. Need to clean the log every round of the test.
+            self.clear_gps_log()
+
+    def test_gnss_cold_ttff_ffpe_coex(self):
+        """
+        Cold start TTFF and FFPE GNSS general coex testing
+        """
+        self.gnss_ttff_ffpe_coex_base('cs')
+
+    def test_gnss_warm_ttff_ffpe_coex(self):
+        """
+        Warm start TTFF and FFPE GNSS general coex testing
+        """
+        self.gnss_ttff_ffpe_coex_base('ws')
+
+    def test_gnss_hot_ttff_ffpe_coex(self):
+        """
+        Hot start TTFF and FFPE GNSS general coex testing
+        """
+        self.gnss_ttff_ffpe_coex_base('hs')
diff --git a/acts_tests/tests/google/gnss/LabTtffTest.py b/acts_tests/tests/google/gnss/LabTtffTest.py
index 5c05fa9..374b570 100644
--- a/acts_tests/tests/google/gnss/LabTtffTest.py
+++ b/acts_tests/tests/google/gnss/LabTtffTest.py
@@ -14,284 +14,29 @@
 #   See the License for the specific language governing permissions and
 #   limitations under the License.
 
-import os
-import time
-import glob
-import errno
+from acts_contrib.test_utils.gnss import LabTtffTestBase as lttb
 
-from acts import utils
-from acts import asserts
-from acts import signals
-from acts import base_test
-from pandas import DataFrame
-from collections import namedtuple
-from acts.controllers.spectracom_lib import gsg6
-from acts.test_utils.gnss import dut_log_test_utils as diaglog
-from acts_contrib.test_utils.gnss import gnss_test_utils as gutils
-from acts_contrib.test_utils.gnss import gnss_testlog_utils as glogutils
 
-DEVICE_GPSLOG_FOLDER = '/sdcard/Android/data/com.android.gpstool/files/'
-GPS_PKG_NAME = 'com.android.gpstool'
-
-class LabTtffTest(base_test.BaseTestClass):
-
-    """ LAB TTFF Tests"""
-    GTW_GPSTOOL_APP = 'gtw_gpstool_apk'
-    SPECTRACOM_IP_KEY = 'spectracom_ip'
-    SPECTRACOM_PORT_KEY = 'spectracom_port'
-    SPECTRACOM_FILES_KEY = 'spectracom_files'
-    SPECTRACOM_POWER_KEY = 'spectracom_power_level'
-    SPIRENT_IP_KEY = 'spirent_ip'
-    SPIRENT_SCENARIO = 'sprient_scenario'
-    CUSTOM_FILES_KEY = 'custom_files'
-    CSTTFF_CRITERIA = 'cs_criteria'
-    HSTTFF_CRITERIA = 'hs_criteria'
-    WSTTFF_CRITERIA = 'ws_criteria'
-    CSTTFF_PECRITERIA = 'cs_ttff_pecriteria'
-    HSTTFF_PECRITERIA = 'hs_ttff_pecriteria'
-    WSTTFF_PECRITERIA = 'ws_ttff_pecriteria'
-    TTFF_ITERATION = 'ttff_iteration'
-    SIMULATOR_LOCATION = 'simulator_location'
-    DIAG_OPTION = 'diag_option'
-
-    def __init__(self, controllers):
-        """ Initializes class attributes. """
-
-        super().__init__(controllers)
-
-        self.dut = None
-        self.spectracom = None
-
-    def setup_class(self):
-        super().setup_class()
-
-        req_params = [
-            self.SPECTRACOM_IP_KEY, self.SPECTRACOM_PORT_KEY,
-            self.SPECTRACOM_FILES_KEY, self.SPECTRACOM_POWER_KEY,
-            self.CSTTFF_CRITERIA, self.HSTTFF_CRITERIA,
-            self.WSTTFF_CRITERIA, self.TTFF_ITERATION,
-            self.SIMULATOR_LOCATION, self.DIAG_OPTION
-        ]
-
-        for param in req_params:
-            if param not in self.user_params:
-                self.log.error('Required parameter {} is missing in config '
-                               'file.'.format(param))
-                raise signals.TestAbortClass(
-                    'Required parameter {} is missing in config '
-                               'file.'.format(param)) 
-        self.dut = self.android_devices[0]
-        self.spectracom_ip = self.user_params[self.SPECTRACOM_IP_KEY]
-        self.spectracom_port = self.user_params[self.SPECTRACOM_PORT_KEY]
-        self.spectracom_file = self.user_params[self.SPECTRACOM_FILES_KEY]
-        self.spectracom_power = self.user_params[self.SPECTRACOM_POWER_KEY]
-        self.gtw_gpstool_app = self.user_params[self.GTW_GPSTOOL_APP]
-        custom_files = self.user_params.get(self.CUSTOM_FILES_KEY, [])
-        self.cs_ttff_criteria = self.user_params.get(self.CSTTFF_CRITERIA, [])
-        self.hs_ttff_criteria = self.user_params.get(self.HSTTFF_CRITERIA, [])
-        self.ws_ttff_criteria = self.user_params.get(self.WSTTFF_CRITERIA, [])
-        self.cs_ttff_pecriteria = self.user_params.get(
-            self.CSTTFF_PECRITERIA, [])
-        self.hs_ttff_pecriteria = self.user_params.get(
-            self.HSTTFF_PECRITERIA, [])
-        self.ws_ttff_pecriteria = self.user_params.get(
-            self.WSTTFF_PECRITERIA, [])
-        self.ttff_iteration = self.user_params.get(self.TTFF_ITERATION, [])
-        self.simulator_location = self.user_params.get(
-            self.SIMULATOR_LOCATION, [])
-	self.diag_option = self.user_params.get(self.DIAG_OPTION, [])
-
-        test_type = namedtuple('Type', ['command', 'criteria'])
-        self.test_types = {
-            'cs': test_type('Cold Start', self.cs_ttff_criteria),
-            'ws': test_type('Warm Start', self.ws_ttff_criteria),
-            'hs': test_type('Hot Start', self.hs_ttff_criteria)
-        }
-
-        # Unpack the rockbottom script or fail class setup if it can't be found
-        for file in custom_files:
-            if 'rockbottom_' + self.dut.model in file:
-                self.rockbottom_script = file
-                break
-        else:
-            raise signals.TestAbortClass(
-                'Required rockbottom script is missing.')
-
-    def setup_test(self):
-
-	self.clear_gps_log()
-        self.spectracom = gsg6.GSG6(self.spectracom_ip, self.spectracom_port)
-
-        self.spectracom.stop_scenario()
-        time.sleep(10)
-        self.spectracom.close()
-
-        self.dut_rockbottom()
-        utils.set_location_service(self.dut, True)
-        gutils.reinstall_package_apk(self.dut, GPS_PKG_NAME,
-                                     self.gtw_gpstool_app)
-        self.spectracom = gsg6.GSG6(self.spectracom_ip, self.spectracom_port)
-        self.spectracom.connect()
-
-    def dut_rockbottom(self):
-        """
-        Set the dut to rockbottom state
-
-        """
-        # The rockbottom script might include a device reboot, so it is
-        # necessary to stop SL4A during its execution.
-        self.dut.stop_services()
-        self.log.info('Executing rockbottom script for ' + self.dut.model)
-        os.chmod(self.rockbottom_script, 0o777)
-        os.system('{} {}'.format(self.rockbottom_script, self.dut.serial))
-        # Make sure the DUT is in root mode after coming back
-        self.dut.root_adb()
-        # Restart SL4A
-        self.dut.start_services()
-
-    def teardown_class(self):
-        """ Executed after completing all selected test cases."""
-        self.clear_gps_log()
-        if self.spectracom:
-            self.spectracom.stop_scenario()
-            time.sleep(10)
-            self.spectracom.close()
-
-    def start_and_set_spectracom_power(self):
-        """
-        Start spectracom secnario and set power level.
-
-        """
-
-        self.spectracom.start_scenario(self.spectracom_file)
-        time.sleep(25)
-        self.spectracom.set_power(self.spectracom_power)
-
-    def get_and_verify_ttff(self, mode):
-        """Retrieve ttff with designate mode.
-
-            Args:
-                mode: A string for identify gnss test mode.
-        """
-        if mode not in self.test_types:
-            raise signals.TestError('Unrecognized mode %s' % mode)
-        test_type = self.test_types.get(mode)
-
-        gutils.process_gnss_by_gtw_gpstool(self.dut,
-                                           self.test_types['cs'].criteria)
-        begin_time = gutils.get_current_epoch_time()
-        gutils.start_ttff_by_gtw_gpstool(
-            self.dut, ttff_mode=mode,
-            iteration=self.ttff_iteration, aid_data=True)
-        ttff_data = gutils.process_ttff_by_gtw_gpstool(self.dut, begin_time,
-                                                       self.simulator_location)
-
-        gps_log_path = os.path.join(self.log_path, 'GPSLogs')
-        self.dut.adb.pull("{} {}".format(DEVICE_GPSLOG_FOLDER, gps_log_path))
-
-        gps_api_log = glob.glob(gps_log_path + '/GPS_API_*.txt')
-        ttff_loop_log = glob.glob(gps_log_path + '/GPS_{}_*.txt'.
-                                  format(mode.upper()))
-
-        if not gps_api_log and ttff_loop_log:
-            raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
-                                    gps_log_path)
-
-        df = DataFrame(glogutils.parse_gpstool_ttfflog_to_df(gps_api_log[0]))
-
-        ttff_dict = {}
-        for i in ttff_data:
-            d = ttff_data[i]._asdict()
-            ttff_dict[i] = dict(d)
-
-        ttff_time =[]
-        ttff_pe = []
-        for i, k in ttff_dict.items():
-            ttff_time.append(ttff_dict[i]['ttff_time'])
-            ttff_pe.append(ttff_dict[i]['ttff_pe'])
-        df['ttff_time'] = ttff_time
-        df['ttff_pe'] = ttff_pe
-        df.to_json(gps_log_path + '/gps_log.json', orient='table')
-        result = gutils.check_ttff_data(
-            self.dut,
-            ttff_data,
-            ttff_mode=test_type.command,
-            criteria=test_type.criteria)
-        if not result:
-            raise signals.TestFailure('%s TTFF fails to reach '
-                                      'designated criteria'
-                                      % test_type.command)
-        return ttff_data
-
-    def verify_pe(self, mode):
-        """
-        Verify ttff Position Error with designate mode.
-
-        Args:
-             mode: A string for identify gnss test mode.
-        """
-
-        ffpe_type = namedtuple('Type', ['command', 'pecriteria'])
-        ffpe_types = {
-            'cs': ffpe_type('Cold Start', self.cs_ttff_pecriteria),
-            'ws': ffpe_type('Warm Start', self.ws_ttff_pecriteria),
-            'hs': ffpe_type('Hot Start', self.hs_ttff_pecriteria)
-        }
-
-        if mode not in self.test_types:
-            raise signals.TestError('Unrecognized mode %s' % mode)
-        test_type = self.test_types.get(mode)
-
-        ttff_data = self.get_and_verify_ttff(mode)
-        result = gutils.check_ttff_pe(
-            self.dut,
-            ttff_data,
-            ttff_mode=test_type.command,
-            pecriteria=test_type.pecriteria
-        )
-        if not result:
-            raise signals.TestFailure('%s TTFF fails to reach '
-                                      'designated criteria'
-                                      % test_type.command)
-        return ttff_data
-
-    def clear_gps_log(self):
-        """
-        Delete the existing GPS GTW Log from DUT.
-
-        """
-        self.dut.adb.shell("rm -rf {}".format(DEVICE_GPSLOG_FOLDER))
+class LabTtffTest(lttb.LabTtffTestBase):
+    """ LAB Stand Alone TTFF Tests"""
 
     def test_gnss_cold_ttff_ffpe(self):
-
-        self.start_and_set_spectracom_power()
-        if self.diag_option is "QCOM":
-                diaglog.start_diagmdlog_background(self.dut, maskfile=self.maskfile)
-        else:
-                #start_tbdlog() yet to add for Broadcom
-                pass
-	self.verify_pe('cs')
-        diaglog.stop_background_diagmdlog(self.dut, self.qxdm_log_path, keep_logs=False)
+        """
+        Cold start TTFF and FFPE Testing
+        """
+        mode = 'cs'
+        self.gnss_ttff_ffpe(mode)
 
     def test_gnss_warm_ttff_ffpe(self):
-
-        self.start_and_set_spectracom_power()
-	if self.diag_option is "QCOM":
-	        diaglog.start_diagmdlog_background(self.dut, maskfile=self.maskfile)
-	else:
-		#start_tbdlog() yet to add for Broadcom
-		pass
-        self.verify_pe('ws')
-        diaglog.stop_background_diagmdlog(self.dut, self.qxdm_log_path, keep_logs=False)
+        """
+        Warm start TTFF and FFPE Testing
+        """
+        mode = 'ws'
+        self.gnss_ttff_ffpe(mode)
 
     def test_gnss_hot_ttff_ffpe(self):
-
-        self.start_and_set_spectracom_power()
-        if self.diag_option is "QCOM":
-                diaglog.start_diagmdlog_background(self.dut, maskfile=self.maskfile)
-        else:
-                #start_tbdlog() yet to add for Broadcom
-                pass
-        self.verify_pe('hs')
-        diaglog.stop_background_diagmdlog(self.dut, self.qxdm_log_path, keep_logs=False)
-
+        """
+        Hot start TTFF and FFPE Testing
+        """
+        mode = 'hs'
+        self.gnss_ttff_ffpe(mode)
diff --git a/acts_tests/tests/google/gnss/LocationPlatinumTest.py b/acts_tests/tests/google/gnss/LocationPlatinumTest.py
index ec80d87..6f4c253 100644
--- a/acts_tests/tests/google/gnss/LocationPlatinumTest.py
+++ b/acts_tests/tests/google/gnss/LocationPlatinumTest.py
@@ -22,14 +22,9 @@
 from acts.base_test import BaseTestClass
 from acts_contrib.test_utils.gnss import gnss_test_utils as gutils
 from acts_contrib.test_utils.wifi import wifi_test_utils as wutils
-from acts_contrib.test_utils.tel.tel_logging_utils import start_qxdm_logger
-from acts_contrib.test_utils.tel.tel_logging_utils import stop_qxdm_logger
-from acts_contrib.test_utils.tel.tel_logging_utils import start_adb_tcpdump
-from acts_contrib.test_utils.tel.tel_logging_utils import stop_adb_tcpdump
-from acts_contrib.test_utils.tel.tel_logging_utils import get_tcpdump_log
 
 BACKGROUND_LOCATION_PERMISSION = 'android.permission.ACCESS_BACKGROUND_LOCATION'
-APP_CLEAN_UP_TIME = 60
+APP_CLEAN_UP_TIME = 10
 
 class LocationPlatinumTest(BaseTestClass):
     """Location Platinum Tests"""
@@ -49,9 +44,7 @@
             # Hot Start Criteria, a int to define the criteria.
             'hs_criteria',
             # NetworkLocationProvide Criteria, a int to define the criteria.
-            'nlp_criteria',
-            # A list to identify QXDM log path.
-            'qdsp6m_path'
+            'nlp_criteria'
         ]
         self.unpack_userparams(req_param_names=req_params)
 
@@ -62,31 +55,19 @@
             'ws': test_type('Warm Start', self.ws_criteria),
             'hs': test_type('Hot Start', self.hs_criteria)
         }
-        gutils._init_device(self.ad)
-        self.begin_time = utils.get_current_epoch_time()
-        gutils.clear_logd_gnss_qxdm_log(self.ad)
-        start_qxdm_logger(self.ad, self.begin_time)
-        start_adb_tcpdump(self.ad)
+        self._init(self.ad)
+
+    def _init(self, ad):
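+        """Enable GNSS verbose logging and, on Qualcomm chipsets, disable the
+        XTRA throttle.
+        """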
+        gutils.enable_gnss_verbose_logging(ad)
+        if gutils.check_chipset_vendor_by_qualcomm(ad):
+            gutils.disable_xtra_throttle(ad)
 
     def setup_test(self):
         """Prepare device with mobile data, wifi and gps ready for test """
-        if int(self.ad.adb.shell('settings get secure location_mode')) != 3:
-            self.ad.adb.shell('settings put secure location_mode 3')
+        gutils.check_location_service(self.ad)
         if not self.ad.droid.wifiCheckState():
             wutils.wifi_toggle_state(self.ad, True)
             gutils.connect_to_wifi_network(self.ad, self.wifi_network)
-        if int(self.ad.adb.shell('settings get global mobile_data')) != 1:
-            gutils.set_mobile_data(self.ad, True)
-        gutils.grant_location_permission(self.ad, True)
-        self.ad.adb.shell('pm grant com.android.gpstool %s' %
-                          BACKGROUND_LOCATION_PERMISSION)
-
-    def teardown_class(self):
-        stop_qxdm_logger(self.ad)
-        gutils.get_gnss_qxdm_log(self.ad, self.qdsp6m_path)
-        stop_adb_tcpdump(self.ad)
-        get_tcpdump_log(self.ad, 'location_platinum', self.begin_time)
-        self.ad.take_bug_report('location_platinum', self.begin_time)
 
     def get_and_verify_ttff(self, mode):
         """Retrieve ttff with designate mode.
@@ -116,28 +97,28 @@
             '%s TTFF fails to reach designated criteria' % test_type.command)
 
     # Test cases
-    def test_gnss_cold_ttff(self):
+    def test_gnss_cs_ttff(self):
         """
             1. Send intent to GPSTool for cold start test.
             2. Retrieve ttff and validate with target criteria.
         """
         self.get_and_verify_ttff('cs')
 
-    def test_gnss_warm_ttff(self):
+    def test_gnss_ws_ttff(self):
         """
             1. Send intent to GPSTool for warm start test.
             2. Retrieve ttff and validate with target criteria.
         """
         self.get_and_verify_ttff('ws')
 
-    def test_gnss_hot_ttff(self):
+    def test_gnss_hs_ttff(self):
         """
             1. Send intent to GPSTool for hot start test.
             2. Retrieve ttff and validate with target criteria.
         """
         self.get_and_verify_ttff('hs')
 
-    def test_nlp_available_by_wifi(self):
+    def test_nlp_by_wifi(self):
         """
             1. Disable mobile data.
             2. Send intent to GPSTool for NLP.
@@ -149,7 +130,7 @@
                 self.ad, 1, 'wifi', self.nlp_criteria),
             'Fail to get NLP from wifi')
 
-    def test_nlp_available_by_cell(self):
+    def test_nlp_by_cell(self):
         """
             1. Disable wifi.
             2. Send intent to GPSTool for NLP.
@@ -161,7 +142,7 @@
                 self.ad, 1, 'cell', self.nlp_criteria),
             'Fail to get NLP from cell')
 
-    def test_toggle_location_setting_off_on_report_location(self):
+    def test_toggle_location_setting_off_on(self):
         """
             1. Toggle location setting off on.
             2. Open Google Map and ask for location.
@@ -174,7 +155,7 @@
             gutils.check_location_api(self.ad, retries=1),
             'DUT failed to receive location fix')
 
-    def test_toggle_location_setting_off_not_report_location(self):
+    def test_location_setting_off(self):
         """
             1. Toggle location setting off.
             2. Open Google Map and ask for location.
@@ -199,7 +180,7 @@
             gutils.check_location_api(self.ad, retries=1),
             'DUT fail to receive location fix')
 
-    def test_toggle_location_permission_off(self):
+    def test_location_permission_off(self):
         """
             1. Toggle Google Map location permission off.
             2. Open Google Map and ask for location.
diff --git a/acts_tests/tests/google/power/bt/PowerBTa2dpTest.py b/acts_tests/tests/google/power/bt/PowerBTa2dpTest.py
index 2d0bc9d..419c418 100644
--- a/acts_tests/tests/google/power/bt/PowerBTa2dpTest.py
+++ b/acts_tests/tests/google/power/bt/PowerBTa2dpTest.py
@@ -19,9 +19,17 @@
 import acts_contrib.test_utils.power.PowerBTBaseTest as PBtBT
 from acts import asserts
 from acts_contrib.test_utils.bt import BtEnum
+from acts.libs.proc import job
 
+DEFAULT_ADB_TIMEOUT = 60
 EXTRA_PLAY_TIME = 10
-
+GET_PROPERTY_HARDWARE_PLATFORM = 'getprop ro.boot.hardware.platform'
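+# Map legacy BT TX power levels (PL7-PL10) to the power-mode names used in
+# test names on P21 and later ('gs') platforms.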
+PL_MAP = {
+    '10': 'EPA_BF',
+    '9': 'EPA_DIV',
+    '8': 'IPA_BF',
+    '7': 'IPA_DIV',
+}
 
 class PowerBTa2dpTest(PBtBT.PowerBTBaseTest):
     def __init__(self, configs):
@@ -45,10 +53,32 @@
         def test_case_fn():
             self.measure_a2dp_power(codec_config, tpl)
 
-        test_case_name = ('test_BTa2dp_{}_codec_at_PL{}'.format(
-            codec_config['codec_type'], tpl))
+        power_level = 'PL{}'.format(tpl)
+
+        # On P21 and later devices, generate the test with a different name.
+        platform = self._get_hardware_platform_at_init_stage()
+        self.log.info('Hardware Platform is: {}'.format(platform))
+        if platform.startswith('gs'):
+            power_level = PL_MAP[str(tpl)]
+            self.log.info('The device is P21 or later, use name {}'.format(
+                power_level))
+
+        test_case_name = ('test_BTa2dp_{}_codec_at_{}'.format(
+            codec_config['codec_type'], power_level))
         setattr(self, test_case_name, test_case_fn)
 
+    def _get_hardware_platform_at_init_stage(self):
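+        """Query ro.boot.hardware.platform over adb before device registration.
+
+        Returns:
+            The hardware platform string, e.g. a 'gs*' value on P21 devices.
+        """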
+
+        # At __init__ stage the android devices are not registered. Thus, run
+        # adb command with device sn directly.
+        sn = self.controller_configs['AndroidDevice'][0]
+        cmd = 'adb -s {} shell {}'.format(sn, GET_PROPERTY_HARDWARE_PLATFORM)
+        result = job.run(cmd, ignore_status=True, timeout=DEFAULT_ADB_TIMEOUT)
+        ret, out, err = result.exit_status, result.stdout, result.stderr
+        self.log.info('get platform ret: {}, out: {}, err: {}'.format(
+            ret, out, err))
+        return out
+
     def measure_a2dp_power(self, codec_config, tpl):
 
         current_codec = self.dut.droid.bluetoothA2dpGetCurrentCodecConfig()
diff --git a/acts_tests/tests/google/power/bt/PowerBTcalibrationTest.py b/acts_tests/tests/google/power/bt/PowerBTcalibrationTest.py
index 13b3f39..c0bac23 100644
--- a/acts_tests/tests/google/power/bt/PowerBTcalibrationTest.py
+++ b/acts_tests/tests/google/power/bt/PowerBTcalibrationTest.py
@@ -21,6 +21,7 @@
 import acts_contrib.test_utils.power.PowerBTBaseTest as PBtBT
 
 EXTRA_PLAY_TIME = 30
+GET_PROPERTY_HARDWARE_PLATFORM = 'getprop ro.boot.hardware.platform'
 
 
 class PowerBTcalibrationTest(PBtBT.PowerBTBaseTest):
@@ -50,20 +51,23 @@
         self.media.play()
         time.sleep(EXTRA_PLAY_TIME)
 
-        # Loop through attenuation in 1 dB step until reaching at PL10
+        # Loop through attenuation in 1 dB steps
         self.log.info('Starting Calibration Process')
-        pl10_count = 0
         for i in range(int(self.attenuator.get_max_atten())):
-
-            self.attenuator.set_atten(i)
-            bt_metrics_dict = btutils.get_bt_metric(self.dut)
-            pwl = bt_metrics_dict['pwlv'][self.dut.serial]
-            self.log.info('Reach PW {} at attenuation {} dB'.format(pwl, i))
-            self.cal_matrix.append([i, pwl])
-            if pwl == 10:
-                pl10_count += 1
-            if pl10_count > 5:
-                break
+            try:
+                self.attenuator.set_atten(i)
+                bt_metrics_dict = btutils.get_bt_metric(self.dut)
+                pwl = bt_metrics_dict['pwlv'][self.dut.serial]
+                rssi = bt_metrics_dict['rssi'][self.dut.serial]
+                bftx = bt_metrics_dict['bftx'][self.dut.serial]
+                self.log.info(
+                    'Reach PW {}, RSSI {}, BFTX {} at attenuation {} dB'.format(
+                        pwl, rssi, bftx, i))
+            except Exception as e:
+                self.log.warning('Got exception {} at attenuation {} dB'.format(
+                    str(e), i))
+                continue
+            self.cal_matrix.append([i, pwl, rssi, bftx])
 
         # Write cal results to csv
         with open(self.log_file, 'w', newline='') as f:
diff --git a/acts_tests/tests/google/power/wifi/PowerWiFidtimTest.py b/acts_tests/tests/google/power/wifi/PowerWiFidtimTest.py
index 2d6eec1..bb68233 100644
--- a/acts_tests/tests/google/power/wifi/PowerWiFidtimTest.py
+++ b/acts_tests/tests/google/power/wifi/PowerWiFidtimTest.py
@@ -18,6 +18,7 @@
 from acts.test_decorators import test_tracker_info
 from acts_contrib.test_utils.power import PowerWiFiBaseTest as PWBT
 from acts_contrib.test_utils.wifi import wifi_power_test_utils as wputils
+from acts.controllers.adb_lib.error import AdbCommandError
 
 
 class PowerWiFidtimTest(PWBT.PowerWiFiBaseTest):
@@ -33,17 +34,34 @@
         attrs = ['screen_status', 'wifi_band', 'dtim']
         indices = [2, 4, 6]
         self.decode_test_configs(attrs, indices)
-        # Initialize the dut to rock-bottom state
-        rebooted = wputils.change_dtim(
-            self.dut,
-            gEnableModulatedDTIM=int(self.test_configs.dtim),
-            gMaxLIModulatedDTIM=dtim_max)
-        if rebooted:
-            self.dut_rockbottom()
-        self.dut.log.info('DTIM value of the phone is now {}'.format(
-            self.test_configs.dtim))
-        self.setup_ap_connection(
-            self.main_network[self.test_configs.wifi_band])
+
+        # Starting with P21 devices, the DTIM setting method changed to use
+        # adb. If no file matches '/vendor/firmware/wlan/*/*.ini', use adb to
+        # change the DTIM.
+        change_dtim_with_adb = False
+        try:
+            self.dut.adb.shell('ls /vendor/firmware/wlan/*/*.ini')
+        except AdbCommandError as e:
+            change_dtim_with_adb = True
+
+        if not change_dtim_with_adb:
+            # Initialize the dut to rock-bottom state
+            rebooted = wputils.change_dtim(
+                self.dut,
+                gEnableModulatedDTIM=int(self.test_configs.dtim),
+                gMaxLIModulatedDTIM=dtim_max)
+            if rebooted:
+                self.dut_rockbottom()
+            self.dut.log.info('DTIM value of the phone is now {}'.format(
+                self.test_configs.dtim))
+        self.setup_ap_connection(self.main_network[self.test_configs.wifi_band])
+
+        if change_dtim_with_adb:
+            self.dut.log.info('No ini file for dtim, change dtim with adb')
+            wputils.change_dtim_adb(
+                self.dut,
+                gEnableModulatedDTIM=int(self.test_configs.dtim))
+
         if self.test_configs.screen_status == 'OFF':
             self.dut.droid.goToSleepNow()
             self.dut.log.info('Screen is OFF')
@@ -56,7 +74,10 @@
         self.dtim_test_func()
 
     @test_tracker_info(uuid='384d3b0f-4335-4b00-8363-308ec27a150c')
-    def test_screen_ON_band_2g_dtim_1(self):
+    def test_screen_OFF_band_2g_dtim_8(self):
+        self.dtim_test_func()
+
+    def test_screen_OFF_band_2g_dtim_9(self):
         self.dtim_test_func()
 
     @test_tracker_info(uuid='017f57c3-e133-461d-80be-d025d1491d8a')
@@ -64,5 +85,8 @@
         self.dtim_test_func()
 
     @test_tracker_info(uuid='327af44d-d9e7-49e0-9bda-accad6241dc7')
-    def test_screen_ON_band_5g_dtim_1(self):
+    def test_screen_OFF_band_5g_dtim_8(self):
+        self.dtim_test_func()
+
+    def test_screen_OFF_band_5g_dtim_9(self):
         self.dtim_test_func()
diff --git a/acts_tests/tests/google/power/wifi/PowerWiFimulticastTest.py b/acts_tests/tests/google/power/wifi/PowerWiFimulticastTest.py
index 8b7e063..f0559e4 100644
--- a/acts_tests/tests/google/power/wifi/PowerWiFimulticastTest.py
+++ b/acts_tests/tests/google/power/wifi/PowerWiFimulticastTest.py
@@ -19,6 +19,7 @@
 from acts_contrib.test_utils.power import PowerWiFiBaseTest as PWBT
 from acts_contrib.test_utils.wifi import wifi_power_test_utils as wputils
 from acts.controllers import packet_sender as pkt_utils
+from acts.controllers.adb_lib.error import AdbCommandError
 
 RA_SHORT_LIFETIME = 3
 RA_LONG_LIFETIME = 1000
@@ -54,18 +55,35 @@
         indices = [2, 4]
         self.decode_test_configs(attrs, indices)
         # Change DTIMx1 on the phone to receive all Multicast packets
-        rebooted = wputils.change_dtim(self.dut,
-                                       gEnableModulatedDTIM=1,
-                                       gMaxLIModulatedDTIM=10)
-        self.dut.log.info('DTIM value of the phone is now DTIMx1')
-        if rebooted:
-            self.dut_rockbottom()
+
+        # Starting with P21 devices, the DTIM setting method changed to use
+        # adb. If no file matches '/vendor/firmware/wlan/*/*.ini', use adb to
+        # change the DTIM.
+        change_dtim_with_adb = False
+        try:
+            self.dut.adb.shell('ls /vendor/firmware/wlan/*/*.ini')
+        except AdbCommandError as e:
+            change_dtim_with_adb = True
+
+        if not change_dtim_with_adb:
+            # Initialize the dut to rock-bottom state
+            rebooted = wputils.change_dtim(
+                self.dut,
+                gEnableModulatedDTIM=1,
+                gMaxLIModulatedDTIM=10)
+            if rebooted:
+                self.dut_rockbottom()
+            self.dut.log.info('DTIM value of the phone is now DTIMx1')
 
         self.setup_ap_connection(
             self.main_network[self.test_configs.wifi_band])
         # Wait for DHCP with timeout of 60 seconds
         wputils.wait_for_dhcp(self.pkt_sender.interface)
 
+        if change_dtim_with_adb:
+            self.dut.log.info('No ini file for dtim, change dtim with adb')
+            wputils.change_dtim_adb(self.dut, gEnableModulatedDTIM=1)
+
         # Set the desired screen status
         if self.test_configs.screen_status == 'OFF':
             self.dut.droid.goToSleepNow()
diff --git a/acts_tests/tests/google/wifi/WifiManagerTest.py b/acts_tests/tests/google/wifi/WifiManagerTest.py
index 902272c..d2a07dc 100644
--- a/acts_tests/tests/google/wifi/WifiManagerTest.py
+++ b/acts_tests/tests/google/wifi/WifiManagerTest.py
@@ -205,8 +205,8 @@
                 " match. \nBefore reboot = %s \n After reboot = %s" %
                 (networks, network_info))
             raise signals.TestFailure(msg)
-        current_count = 0
         # For each network, check if it exists in configured list after reboot
+        current_ssids = set()
         for network in networks:
             exists = wutils.match_networks({
                 WifiEnums.SSID_KEY: network[WifiEnums.SSID_KEY]
@@ -218,10 +218,10 @@
             # Get the new network id for each network after reboot.
             network[WifiEnums.NETID_KEY] = exists[0]['networkId']
             if exists[0]['status'] == 'CURRENT':
-                current_count += 1
+                current_ssids.add(network[WifiEnums.SSID_KEY])
                 # At any given point, there can only be one currently active
                 # network, defined with 'status':'CURRENT'
-                if current_count > 1:
+                if len(current_ssids) > 1:
                     raise signals.TestFailure("More than one network showing"
                                               "as 'CURRENT' after reboot")
 
diff --git a/acts_tests/tests/google/wifi/WifiPingTest.py b/acts_tests/tests/google/wifi/WifiPingTest.py
index ea45381..096a935 100644
--- a/acts_tests/tests/google/wifi/WifiPingTest.py
+++ b/acts_tests/tests/google/wifi/WifiPingTest.py
@@ -29,6 +29,7 @@
 from acts_contrib.test_utils.wifi import ota_chamber
 from acts_contrib.test_utils.wifi import ota_sniffer
 from acts_contrib.test_utils.wifi import wifi_performance_test_utils as wputils
+from acts_contrib.test_utils.wifi.wifi_performance_test_utils.bokeh_figure import BokehFigure
 from acts_contrib.test_utils.wifi import wifi_retail_ap as retail_ap
 from acts_contrib.test_utils.wifi import wifi_test_utils as wutils
 from functools import partial
@@ -79,7 +80,11 @@
         self.access_point = retail_ap.create(self.RetailAccessPoints)[0]
         if hasattr(self,
                    'OTASniffer') and self.testbed_params['sniffer_enable']:
-            self.sniffer = ota_sniffer.create(self.OTASniffer)[0]
+            try:
+                self.sniffer = ota_sniffer.create(self.OTASniffer)[0]
+            except:
+                self.log.warning('Could not start sniffer. Disabling sniffs.')
+                self.testbed_params['sniffer_enable'] = 0
         self.log.info('Access Point Configuration: {}'.format(
             self.access_point.ap_settings))
         self.log_path = os.path.join(logging.log_path, 'results')
@@ -98,9 +103,13 @@
         self.user_params['retry_tests'] = [self.__class__.__name__]
 
     def teardown_class(self):
+        for attenuator in self.attenuators:
+            attenuator.set_atten(0, strict=False, retry=True)
         # Turn WiFi OFF and reset AP
+        self.access_point.teardown()
         for dev in self.android_devices:
             wutils.wifi_toggle_state(dev, False)
+            dev.go_to_sleep()
         self.process_testclass_results()
 
     def setup_test(self):
@@ -132,7 +141,9 @@
         results_file_path = os.path.join(self.log_path,
                                          'testclass_summary.json')
         with open(results_file_path, 'w') as results_file:
-            json.dump(testclass_summary, results_file, indent=4)
+            json.dump(wputils.serialize_dict(testclass_summary),
+                      results_file,
+                      indent=4)
 
     def pass_fail_check_ping_rtt(self, result):
         """Check the test result and decide if it passed or failed.
@@ -232,6 +243,10 @@
             range_index]
         ping_range_result['peak_throughput_pct'] = 100 - min(
             ping_loss_over_att)
+        ping_range_result['total_attenuation'] = [
+            ping_range_result['fixed_attenuation'] + att
+            for att in testcase_params['atten_range']
+        ]
         ping_range_result['range'] = (ping_range_result['atten_at_range'] +
                                       ping_range_result['fixed_attenuation'])
         ping_range_result['llstats_at_range'] = (
@@ -248,14 +263,15 @@
         results_file_path = os.path.join(
             self.log_path, '{}.json'.format(self.current_test_name))
         with open(results_file_path, 'w') as results_file:
-            json.dump(ping_range_result, results_file, indent=4)
+            json.dump(wputils.serialize_dict(ping_range_result),
+                      results_file,
+                      indent=4)
 
         # Plot results
-        if 'range' not in self.current_test_name:
-            figure = wputils.BokehFigure(
-                self.current_test_name,
-                x_label='Timestamp (s)',
-                primary_y_label='Round Trip Time (ms)')
+        if 'rtt' in self.current_test_name:
+            figure = BokehFigure(self.current_test_name,
+                                 x_label='Timestamp (s)',
+                                 primary_y_label='Round Trip Time (ms)')
             for idx, result in enumerate(ping_range_result['ping_results']):
                 if len(result['rtt']) > 1:
                     x_data = [
@@ -298,50 +314,86 @@
         if self.testbed_params['sniffer_enable']:
             self.sniffer.start_capture(
                 testcase_params['test_network'],
-                chan=int(testcase_params['channel']),
+                chan=testcase_params['channel'],
                 bw=testcase_params['bandwidth'],
                 duration=testcase_params['ping_duration'] *
                 len(testcase_params['atten_range']) + self.TEST_TIMEOUT)
         # Run ping and sweep attenuation as needed
         zero_counter = 0
+        pending_first_ping = 1
         for atten in testcase_params['atten_range']:
             for attenuator in self.attenuators:
                 attenuator.set_atten(atten, strict=False, retry=True)
-            rssi_future = wputils.get_connected_rssi_nb(
-                self.dut,
-                int(testcase_params['ping_duration'] / 2 /
-                    self.RSSI_POLL_INTERVAL), self.RSSI_POLL_INTERVAL,
-                testcase_params['ping_duration'] / 2)
+            if self.testclass_params.get('monitor_rssi', 1):
+                rssi_future = wputils.get_connected_rssi_nb(
+                    self.dut,
+                    int(testcase_params['ping_duration'] / 2 /
+                        self.RSSI_POLL_INTERVAL), self.RSSI_POLL_INTERVAL,
+                    testcase_params['ping_duration'] / 2)
             # Refresh link layer stats
             llstats_obj.update_stats()
-            current_ping_stats = wputils.get_ping_stats(
-                self.ping_server, self.dut_ip,
-                testcase_params['ping_duration'],
-                testcase_params['ping_interval'], testcase_params['ping_size'])
-            current_rssi = rssi_future.result()
+            if testcase_params.get('ping_from_dut', False):
+                current_ping_stats = wputils.get_ping_stats(
+                    self.dut,
+                    wputils.get_server_address(self.ping_server, self.dut_ip,
+                                               '255.255.255.0'),
+                    testcase_params['ping_duration'],
+                    testcase_params['ping_interval'],
+                    testcase_params['ping_size'])
+            else:
+                current_ping_stats = wputils.get_ping_stats(
+                    self.ping_server, self.dut_ip,
+                    testcase_params['ping_duration'],
+                    testcase_params['ping_interval'],
+                    testcase_params['ping_size'])
+            if self.testclass_params.get('monitor_rssi', 1):
+                current_rssi = rssi_future.result()
+            else:
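+                # RSSI monitoring is disabled; substitute empty RSSI results
+                # so downstream processing keeps a consistent structure.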
+                current_rssi = collections.OrderedDict([
+                    ('time_stamp', []), ('bssid', []), ('ssid', []),
+                    ('frequency', []),
+                    ('signal_poll_rssi', wputils.empty_rssi_result()),
+                    ('signal_poll_avg_rssi', wputils.empty_rssi_result()),
+                    ('chain_0_rssi', wputils.empty_rssi_result()),
+                    ('chain_1_rssi', wputils.empty_rssi_result())
+                ])
             test_result['rssi_results'].append(current_rssi)
             llstats_obj.update_stats()
             curr_llstats = llstats_obj.llstats_incremental.copy()
             test_result['llstats'].append(curr_llstats)
             if current_ping_stats['connected']:
+                llstats_str = 'TX MCS = {0} ({1:.1f}%). RX MCS = {2} ({3:.1f}%)'.format(
+                    curr_llstats['summary']['common_tx_mcs'],
+                    curr_llstats['summary']['common_tx_mcs_freq'] * 100,
+                    curr_llstats['summary']['common_rx_mcs'],
+                    curr_llstats['summary']['common_rx_mcs_freq'] * 100)
                 self.log.info(
                     'Attenuation = {0}dB\tPacket Loss = {1:.1f}%\t'
-                    'Avg RTT = {2:.2f}ms\tRSSI = {3:.2f} [{4},{5}]\t'.format(
-                        atten, current_ping_stats['packet_loss_percentage'],
-                        statistics.mean(current_ping_stats['rtt']),
-                        current_rssi['signal_poll_rssi']['mean'],
-                        current_rssi['chain_0_rssi']['mean'],
-                        current_rssi['chain_1_rssi']['mean']))
+                    'Avg RTT = {2:.2f}ms\tRSSI = {3:.1f} [{4:.1f},{5:.1f}]\t{6}\t'
+                    .format(atten,
+                            current_ping_stats['packet_loss_percentage'],
+                            statistics.mean(current_ping_stats['rtt']),
+                            current_rssi['signal_poll_rssi']['mean'],
+                            current_rssi['chain_0_rssi']['mean'],
+                            current_rssi['chain_1_rssi']['mean'], llstats_str))
                 if current_ping_stats['packet_loss_percentage'] == 100:
                     zero_counter = zero_counter + 1
                 else:
                     zero_counter = 0
+                    pending_first_ping = 0
             else:
                 self.log.info(
                     'Attenuation = {}dB. Disconnected.'.format(atten))
                 zero_counter = zero_counter + 1
             test_result['ping_results'].append(current_ping_stats.as_dict())
-            if zero_counter == self.MAX_CONSECUTIVE_ZEROS:
+            # The test ends when ping loss is stable at 100%. If the test has
+            # successfully started, it ends after MAX_CONSECUTIVE_ZEROS. In
+            # case of a retry, extra zeros are allowed to ensure the test
+            # properly starts.
+            if self.retry_flag and pending_first_ping:
+                allowable_zeros = self.MAX_CONSECUTIVE_ZEROS**2
+            else:
+                allowable_zeros = self.MAX_CONSECUTIVE_ZEROS
+            if zero_counter == allowable_zeros:
                 self.log.info('Ping loss stable at 100%. Stopping test now.')
                 for idx in range(
                         len(testcase_params['atten_range']) -
@@ -349,6 +401,11 @@
                     test_result['ping_results'].append(
                         self.DISCONNECTED_PING_RESULT)
                 break
+        # Set attenuator to initial setting
+        for attenuator in self.attenuators:
+            attenuator.set_atten(testcase_params['atten_range'][0],
+                                 strict=False,
+                                 retry=True)
         if self.testbed_params['sniffer_enable']:
             self.sniffer.stop_capture()
         return test_result
@@ -361,12 +418,16 @@
         """
         band = self.access_point.band_lookup_by_channel(
             testcase_params['channel'])
-        if '2G' in band:
-            frequency = wutils.WifiEnums.channel_2G_to_freq[
-                testcase_params['channel']]
+        if '6G' in band:
+            frequency = wutils.WifiEnums.channel_6G_to_freq[int(
+                testcase_params['channel'].strip('6g'))]
         else:
-            frequency = wutils.WifiEnums.channel_5G_to_freq[
-                testcase_params['channel']]
+            if testcase_params['channel'] < 13:
+                frequency = wutils.WifiEnums.channel_2G_to_freq[
+                    testcase_params['channel']]
+            else:
+                frequency = wutils.WifiEnums.channel_5G_to_freq[
+                    testcase_params['channel']]
         if frequency in wutils.WifiEnums.DFS_5G_FREQUENCIES:
             self.access_point.set_region(self.testbed_params['DFS_region'])
         else:
@@ -380,34 +441,47 @@
         self.log.info('Access Point Configuration: {}'.format(
             self.access_point.ap_settings))
 
-    def setup_dut(self, testcase_params):
-        """Sets up the DUT in the configuration required by the test.
-
-        Args:
-            testcase_params: dict containing AP and other test params
-        """
-        # Check battery level before test
-        if not wputils.health_check(self.dut, 10):
-            asserts.skip('Battery level too low. Skipping test.')
-        # Turn screen off to preserve battery
-        self.dut.go_to_sleep()
+    def validate_and_connect(self, testcase_params):
         if wputils.validate_network(self.dut,
                                     testcase_params['test_network']['SSID']):
             self.log.info('Already connected to desired network')
         else:
-            wutils.wifi_toggle_state(self.dut, False)
-            wutils.set_wifi_country_code(self.dut,
-                                         self.testclass_params['country_code'])
-            wutils.wifi_toggle_state(self.dut, True)
-            wutils.reset_wifi(self.dut)
-            wutils.set_wifi_country_code(self.dut,
-                                         self.testclass_params['country_code'])
+            current_country = wputils.get_country_code(self.dut)
+            if current_country != self.testclass_params['country_code']:
+                self.log.warning(
+                    'Requested CC: {}, Current CC: {}. Resetting WiFi'.format(
+                        self.testclass_params['country_code'],
+                        current_country))
+                wutils.wifi_toggle_state(self.dut, False)
+                wutils.set_wifi_country_code(
+                    self.dut, self.testclass_params['country_code'])
+                wutils.wifi_toggle_state(self.dut, True)
+                wutils.reset_wifi(self.dut)
+                wutils.set_wifi_country_code(
+                    self.dut, self.testclass_params['country_code'])
+            if self.testbed_params.get('txbf_off', False):
+                wputils.disable_beamforming(self.dut)
             testcase_params['test_network']['channel'] = testcase_params[
                 'channel']
             wutils.wifi_connect(self.dut,
                                 testcase_params['test_network'],
                                 num_of_tries=5,
                                 check_connectivity=True)
+
+    def setup_dut(self, testcase_params):
+        """Sets up the DUT in the configuration required by the test.
+
+        Args:
+            testcase_params: dict containing AP and other test params
+        """
+        # Keep screen on if requested; otherwise sleep to preserve battery
+        if self.testbed_params.get('screen_on',
+                                   False) or self.testclass_params.get(
+                                       'screen_on', False):
+            self.dut.droid.wakeLockAcquireDim()
+        else:
+            self.dut.go_to_sleep()
+        self.validate_and_connect(testcase_params)
         self.dut_ip = self.dut.droid.connectivityGetIPv4Addresses('wlan0')[0]
         if testcase_params['channel'] not in self.atten_dut_chain_map.keys():
             self.atten_dut_chain_map[testcase_params[
@@ -432,7 +506,9 @@
         self.setup_ap(testcase_params)
-        # Set attenuator to 0 dB
+        # Set attenuators to the starting attenuation of the sweep
         for attenuator in self.attenuators:
-            attenuator.set_atten(0, strict=False, retry=True)
+            attenuator.set_atten(testcase_params['atten_range'][0],
+                                 strict=False,
+                                 retry=True)
         # Reset, configure, and connect DUT
         self.setup_dut(testcase_params)
 
@@ -450,6 +526,11 @@
         return self.testclass_params['range_atten_start']
 
     def compile_test_params(self, testcase_params):
+        # Check if test should be skipped.
+        wputils.check_skip_conditions(testcase_params, self.dut,
+                                      self.access_point,
+                                      getattr(self, 'ota_chamber', None))
+
         band = self.access_point.band_lookup_by_channel(
             testcase_params['channel'])
         testcase_params['test_network'] = self.main_network[band]
@@ -520,11 +601,11 @@
         allowed_configs = {
             20: [
                 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 36, 40, 44, 48, 64, 100,
-                116, 132, 140, 149, 153, 157, 161
+                116, 132, 140, 149, 153, 157, 161, '6g37', '6g117', '6g213'
             ],
-            40: [36, 44, 100, 149, 157],
-            80: [36, 100, 149],
-            160: [36]
+            40: [36, 44, 100, 149, 157, '6g37', '6g117', '6g213'],
+            80: [36, 100, 149, '6g37', '6g117', '6g213'],
+            160: [36, '6g37', '6g117', '6g213']
         }
 
         for channel, mode, chain, test_type in itertools.product(
@@ -549,25 +630,33 @@
 class WifiPing_TwoChain_Test(WifiPingTest):
     def __init__(self, controllers):
         super().__init__(controllers)
-        self.tests = self.generate_test_cases(
-            ap_power='standard',
-            channels=[1, 6, 11, 36, 40, 44, 48, 149, 153, 157, 161],
-            modes=['bw20', 'bw40', 'bw80'],
-            test_types=[
-                'test_ping_range', 'test_fast_ping_rtt', 'test_slow_ping_rtt'
-            ],
-            chain_mask=['2x2'])
+        self.tests = self.generate_test_cases(ap_power='standard',
+                                              channels=[
+                                                  1, 6, 11, 36, 40, 44, 48,
+                                                  149, 153, 157, 161, '6g37',
+                                                  '6g117', '6g213'
+                                              ],
+                                              modes=['bw20', 'bw40', 'bw80'],
+                                              test_types=[
+                                                  'test_ping_range',
+                                                  'test_fast_ping_rtt',
+                                                  'test_slow_ping_rtt'
+                                              ],
+                                              chain_mask=['2x2'])
 
 
 class WifiPing_PerChainRange_Test(WifiPingTest):
     def __init__(self, controllers):
         super().__init__(controllers)
-        self.tests = self.generate_test_cases(
-            ap_power='standard',
-            chain_mask=['0', '1', '2x2'],
-            channels=[1, 6, 11, 36, 40, 44, 48, 149, 153, 157, 161],
-            modes=['bw20', 'bw40', 'bw80'],
-            test_types=['test_ping_range'])
+        self.tests = self.generate_test_cases(ap_power='standard',
+                                              chain_mask=['0', '1', '2x2'],
+                                              channels=[
+                                                  1, 6, 11, 36, 40, 44, 48,
+                                                  149, 153, 157, 161, '6g37',
+                                                  '6g117', '6g213'
+                                              ],
+                                              modes=['bw20', 'bw40', 'bw80'],
+                                              test_types=['test_ping_range'])
 
 
 class WifiPing_LowPowerAP_Test(WifiPingTest):
@@ -614,21 +703,24 @@
         range_vs_angle = collections.OrderedDict()
         for test in self.testclass_results:
             curr_params = test['testcase_params']
-            curr_config = curr_params['channel']
-            if curr_config in range_vs_angle:
-                if curr_params['position'] not in range_vs_angle[curr_config][
-                        'position']:
-                    range_vs_angle[curr_config]['position'].append(
+            curr_config = wputils.extract_sub_dict(
+                curr_params, ['channel', 'mode', 'chain_mask'])
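+            # Convert the config to a tuple of items so it can serve as
+            # a hashable dictionary key.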
+            curr_config_id = tuple(curr_config.items())
+            if curr_config_id in range_vs_angle:
+                if curr_params['position'] not in range_vs_angle[
+                        curr_config_id]['position']:
+                    range_vs_angle[curr_config_id]['position'].append(
                         curr_params['position'])
-                    range_vs_angle[curr_config]['range'].append(test['range'])
-                    range_vs_angle[curr_config]['llstats_at_range'].append(
+                    range_vs_angle[curr_config_id]['range'].append(
+                        test['range'])
+                    range_vs_angle[curr_config_id]['llstats_at_range'].append(
                         test['llstats_at_range'])
                 else:
-                    range_vs_angle[curr_config]['range'][-1] = test['range']
-                    range_vs_angle[curr_config]['llstats_at_range'][-1] = test[
-                        'llstats_at_range']
+                    range_vs_angle[curr_config_id]['range'][-1] = test['range']
+                    range_vs_angle[curr_config_id]['llstats_at_range'][
+                        -1] = test['llstats_at_range']
             else:
-                range_vs_angle[curr_config] = {
+                range_vs_angle[curr_config_id] = {
                     'position': [curr_params['position']],
                     'range': [test['range']],
                     'llstats_at_range': [test['llstats_at_range']]
@@ -639,21 +731,24 @@
             x_label = 'Angle (deg)'
         elif chamber_mode == 'stepped stirrers':
             x_label = 'Position Index'
-        figure = wputils.BokehFigure(
+        figure = BokehFigure(
             title='Range vs. Position',
             x_label=x_label,
             primary_y_label='Range (dB)',
         )
-        for channel, channel_data in range_vs_angle.items():
-            figure.add_line(x_data=channel_data['position'],
-                            y_data=channel_data['range'],
-                            hover_text=channel_data['llstats_at_range'],
-                            legend='Channel {}'.format(channel))
-            average_range = sum(channel_data['range']) / len(
-                channel_data['range'])
-            self.log.info('Average range for Channel {} is: {}dB'.format(
-                channel, average_range))
-            metric_name = 'ota_summary_ch{}.avg_range'.format(channel)
+        for curr_config_id, curr_config_data in range_vs_angle.items():
+            curr_config = collections.OrderedDict(curr_config_id)
+            figure.add_line(x_data=curr_config_data['position'],
+                            y_data=curr_config_data['range'],
+                            hover_text=curr_config_data['llstats_at_range'],
+                            legend='{}'.format(curr_config_id))
+            average_range = sum(curr_config_data['range']) / len(
+                curr_config_data['range'])
+            self.log.info('Average range for {} is: {}dB'.format(
+                curr_config_id, average_range))
+            metric_name = 'ota_summary_ch{}_{}_ch{}.avg_range'.format(
+                curr_config['channel'], curr_config['mode'],
+                curr_config['chain_mask'])
             self.testclass_metric_logger.add_metric(metric_name, average_range)
         current_context = context.get_current_context().get_full_output_path()
         plot_file_path = os.path.join(current_context, 'results.html')
@@ -663,20 +758,35 @@
         results_file_path = os.path.join(current_context,
                                          'testclass_summary.json')
         with open(results_file_path, 'w') as results_file:
-            json.dump(range_vs_angle, results_file, indent=4)
+            json.dump(wputils.serialize_dict(range_vs_angle),
+                      results_file,
+                      indent=4)
+
+    def setup_dut(self, testcase_params):
+        """Sets up the DUT in the configuration required by the test.
+
+        Args:
+            testcase_params: dict containing AP and other test params
+        """
+        wputils.set_chain_mask(self.dut, testcase_params['chain_mask'])
+        # Keep screen on if requested; otherwise sleep to preserve battery
+        if self.testbed_params.get('screen_on',
+                                   False) or self.testclass_params.get(
+                                       'screen_on', False):
+            self.dut.droid.wakeLockAcquireDim()
+        else:
+            self.dut.go_to_sleep()
+        self.validate_and_connect(testcase_params)
+        self.dut_ip = self.dut.droid.connectivityGetIPv4Addresses('wlan0')[0]
 
     def setup_ping_test(self, testcase_params):
-        WifiPingTest.setup_ping_test(self, testcase_params)
         # Setup turntable
         if testcase_params['chamber_mode'] == 'orientation':
             self.ota_chamber.set_orientation(testcase_params['position'])
         elif testcase_params['chamber_mode'] == 'stepped stirrers':
             self.ota_chamber.step_stirrers(testcase_params['total_positions'])
-
-    def extract_test_id(self, testcase_params, id_fields):
-        test_id = collections.OrderedDict(
-            (param, testcase_params[param]) for param in id_fields)
-        return test_id
+        # Continue setting up ping test
+        WifiPingTest.setup_ping_test(self, testcase_params)
 
     def get_range_start_atten(self, testcase_params):
         """Gets the starting attenuation for this ping test.
@@ -694,12 +804,12 @@
             return self.testclass_params['range_atten_start']
         # Get the current and reference test config. The reference test is the
         # one performed at the current MCS+1
-        ref_test_params = self.extract_test_id(testcase_params,
-                                               ['channel', 'mode'])
+        ref_test_params = wputils.extract_sub_dict(
+            testcase_params, ['channel', 'mode', 'chain_mask'])
         # Check if reference test has been run and set attenuation accordingly
         previous_params = [
-            self.extract_test_id(result['testcase_params'],
-                                 ['channel', 'mode'])
+            wputils.extract_sub_dict(result['testcase_params'],
+                                     ['channel', 'mode', 'chain_mask'])
             for result in self.testclass_results
         ]
         try:
@@ -715,32 +825,32 @@
             start_atten = self.testclass_params['range_atten_start']
         return start_atten
 
-    def generate_test_cases(self, ap_power, channels, modes, chamber_mode,
-                            positions):
+    def generate_test_cases(self, ap_power, channels, modes, chain_masks,
+                            chamber_mode, positions):
         test_cases = []
         allowed_configs = {
             20: [
                 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 36, 40, 44, 48, 64, 100,
-                116, 132, 140, 149, 153, 157, 161
+                116, 132, 140, 149, 153, 157, 161, '6g37', '6g117', '6g213'
             ],
-            40: [36, 44, 100, 149, 157],
-            80: [36, 100, 149],
-            160: [36]
+            40: [36, 44, 100, 149, 157, '6g37', '6g117', '6g213'],
+            80: [36, 100, 149, '6g37', '6g117', '6g213'],
+            160: [36, '6g37', '6g117', '6g213']
         }
-        for channel, mode, position in itertools.product(
-                channels, modes, positions):
+        for channel, mode, chain_mask, position in itertools.product(
+                channels, modes, chain_masks, positions):
             bandwidth = int(''.join([x for x in mode if x.isdigit()]))
             if channel not in allowed_configs[bandwidth]:
                 continue
-            testcase_name = 'test_ping_range_ch{}_{}_pos{}'.format(
-                channel, mode, position)
+            testcase_name = 'test_ping_range_ch{}_{}_ch{}_pos{}'.format(
+                channel, mode, chain_mask, position)
             testcase_params = collections.OrderedDict(
                 test_type='test_ping_range',
                 ap_power=ap_power,
                 channel=channel,
                 mode=mode,
                 bandwidth=bandwidth,
-                chain_mask='2x2',
+                chain_mask=chain_mask,
                 chamber_mode=chamber_mode,
                 total_positions=len(positions),
                 position=position)
@@ -753,23 +863,29 @@
 class WifiOtaPing_TenDegree_Test(WifiOtaPingTest):
     def __init__(self, controllers):
         WifiOtaPingTest.__init__(self, controllers)
-        self.tests = self.generate_test_cases(ap_power='standard',
-                                              channels=[6, 36, 149],
-                                              modes=['bw20'],
-                                              chamber_mode='orientation',
-                                              positions=list(range(0, 360,
-                                                                   10)))
+        self.tests = self.generate_test_cases(
+            ap_power='standard',
+            channels=[6, 36, 149, '6g37', '6g117', '6g213'],
+            modes=['bw20'],
+            chain_masks=['2x2'],
+            chamber_mode='orientation',
+            positions=list(range(0, 360, 10)))
 
 
 class WifiOtaPing_45Degree_Test(WifiOtaPingTest):
     def __init__(self, controllers):
         WifiOtaPingTest.__init__(self, controllers)
-        self.tests = self.generate_test_cases(
-            ap_power='standard',
-            channels=[1, 6, 11, 36, 40, 44, 48, 149, 153, 157, 161],
-            modes=['bw20'],
-            chamber_mode='orientation',
-            positions=list(range(0, 360, 45)))
+        self.tests = self.generate_test_cases(ap_power='standard',
+                                              channels=[
+                                                  1, 6, 11, 36, 40, 44, 48,
+                                                  149, 153, 157, 161, '6g37',
+                                                  '6g117', '6g213'
+                                              ],
+                                              modes=['bw20'],
+                                              chain_masks=['2x2'],
+                                              chamber_mode='orientation',
+                                              positions=list(range(0, 360,
+                                                                   45)))
 
 
 class WifiOtaPing_SteppedStirrers_Test(WifiOtaPingTest):
@@ -778,6 +894,7 @@
         self.tests = self.generate_test_cases(ap_power='standard',
                                               channels=[6, 36, 149],
                                               modes=['bw20'],
+                                              chain_masks=['2x2'],
                                               chamber_mode='stepped stirrers',
                                               positions=list(range(100)))
 
@@ -788,6 +905,7 @@
         self.tests = self.generate_test_cases(ap_power='low_power',
                                               channels=[6, 36, 149],
                                               modes=['bw20'],
+                                              chain_masks=['2x2'],
                                               chamber_mode='orientation',
                                               positions=list(range(0, 360,
                                                                    10)))
@@ -800,6 +918,7 @@
             ap_power='low_power',
             channels=[1, 6, 11, 36, 40, 44, 48, 149, 153, 157, 161],
             modes=['bw20'],
+            chain_masks=['2x2'],
             chamber_mode='orientation',
             positions=list(range(0, 360, 45)))
 
@@ -810,5 +929,30 @@
         self.tests = self.generate_test_cases(ap_power='low_power',
                                               channels=[6, 36, 149],
                                               modes=['bw20'],
+                                              chain_masks=['2x2'],
                                               chamber_mode='stepped stirrers',
-                                              positions=list(range(100)))
\ No newline at end of file
+                                              positions=list(range(100)))
+
+
+class WifiOtaPing_LowPowerAP_PerChain_TenDegree_Test(WifiOtaPingTest):
+    def __init__(self, controllers):
+        WifiOtaPingTest.__init__(self, controllers)
+        self.tests = self.generate_test_cases(ap_power='low_power',
+                                              channels=[6, 36, 149],
+                                              modes=['bw20'],
+                                              chain_masks=[0, 1, '2x2'],
+                                              chamber_mode='orientation',
+                                              positions=list(range(0, 360,
+                                                                   10)))
+
+
+class WifiOtaPing_PerChain_TenDegree_Test(WifiOtaPingTest):
+    def __init__(self, controllers):
+        WifiOtaPingTest.__init__(self, controllers)
+        self.tests = self.generate_test_cases(
+            ap_power='standard',
+            channels=[6, 36, 149, '6g37', '6g117', '6g213'],
+            modes=['bw20'],
+            chain_masks=[0, 1, '2x2'],
+            chamber_mode='orientation',
+            positions=list(range(0, 360, 10)))
diff --git a/acts_tests/tests/google/wifi/WifiPnoTest.py b/acts_tests/tests/google/wifi/WifiPnoTest.py
index d93fa45..5378b8f 100644
--- a/acts_tests/tests/google/wifi/WifiPnoTest.py
+++ b/acts_tests/tests/google/wifi/WifiPnoTest.py
@@ -62,6 +62,9 @@
         self.dut.droid.goToSleepNow()
         wutils.reset_wifi(self.dut)
         self.dut.ed.clear_all_events()
+        # Connect to saved networks so they won't be excluded from PNO scan.
+        wutils.connect_to_wifi_network(self.dut, self.pno_network_a)
+        wutils.connect_to_wifi_network(self.dut, self.pno_network_b)
 
     def teardown_test(self):
         super().teardown_test()
@@ -197,9 +200,6 @@
         self.add_network_and_enable(self.pno_network_b)
         # Force single scan so that both networks become preferred before PNO.
         wutils.start_wifi_connection_scan_and_return_status(self.dut)
-        self.dut.droid.goToSleepNow()
-        wutils.wifi_toggle_state(self.dut, False)
-        wutils.wifi_toggle_state(self.dut, True)
         time.sleep(10)
         self.trigger_pno_and_assert_connect("b_on_a_off", self.pno_network_b)
 
diff --git a/acts_tests/tests/google/wifi/WifiRoamingPerformanceTest.py b/acts_tests/tests/google/wifi/WifiRoamingPerformanceTest.py
index db42e2e..2c739c6 100644
--- a/acts_tests/tests/google/wifi/WifiRoamingPerformanceTest.py
+++ b/acts_tests/tests/google/wifi/WifiRoamingPerformanceTest.py
@@ -28,6 +28,7 @@
 from acts.controllers.utils_lib import ssh
 from acts.metrics.loggers.blackbox import BlackboxMappedMetricLogger
 from acts_contrib.test_utils.wifi import wifi_performance_test_utils as wputils
+from acts_contrib.test_utils.wifi.wifi_performance_test_utils.bokeh_figure import BokehFigure
 from acts_contrib.test_utils.wifi import wifi_retail_ap as retail_ap
 from acts_contrib.test_utils.wifi import wifi_test_utils as wutils
 
@@ -221,7 +222,7 @@
         roam_stats = collections.OrderedDict()
         current_context = context.get_current_context().get_full_output_path()
         for secondary_atten, results_list in results_dict.items():
-            figure = wputils.BokehFigure(title=self.current_test_name,
+            figure = BokehFigure(title=self.current_test_name,
                                          x_label='Time (ms)',
                                          primary_y_label=primary_y_axis,
                                          secondary_y_label='RSSI (dBm)')
@@ -383,7 +384,7 @@
             output_file_path: optional path to output file
         """
         if not figure:
-            figure = wputils.BokehFigure(title=self.current_test_name,
+            figure = BokehFigure(title=self.current_test_name,
                                          x_label='Time (ms)',
                                          primary_y_label='RTT (ms)',
                                          secondary_y_label='RSSI (dBm)')
@@ -418,7 +419,7 @@
             output_file_path: optional path to output file
         """
         if not figure:
-            figure = wputils.BokehFigure(title=self.current_test_name,
+            figure = BokehFigure(title=self.current_test_name,
                                          x_label='Time (s)',
                                          primary_y_label='Throughput (Mbps)',
                                          secondary_y_label='RSSI (dBm)')
diff --git a/acts_tests/tests/google/wifi/WifiRssiTest.py b/acts_tests/tests/google/wifi/WifiRssiTest.py
index 1c0c6df..06eed43 100644
--- a/acts_tests/tests/google/wifi/WifiRssiTest.py
+++ b/acts_tests/tests/google/wifi/WifiRssiTest.py
@@ -31,6 +31,7 @@
 from acts.metrics.loggers.blackbox import BlackboxMappedMetricLogger
 from acts_contrib.test_utils.wifi import ota_chamber
 from acts_contrib.test_utils.wifi import wifi_performance_test_utils as wputils
+from acts_contrib.test_utils.wifi.wifi_performance_test_utils.bokeh_figure import BokehFigure
 from acts_contrib.test_utils.wifi import wifi_retail_ap as retail_ap
 from acts_contrib.test_utils.wifi import wifi_test_utils as wutils
 from concurrent.futures import ThreadPoolExecutor
@@ -90,6 +91,13 @@
     def teardown_test(self):
         self.iperf_server.stop()
 
+    def teardown_class(self):
+        # Turn WiFi OFF and reset AP
+        self.access_point.teardown()
+        for dev in self.android_devices:
+            wutils.wifi_toggle_state(dev, False)
+            dev.go_to_sleep()
+
     def pass_fail_check_rssi_stability(self, testcase_params,
                                        postprocessed_results):
         """Check the test result and decide if it passed or failed.
@@ -220,7 +228,9 @@
         # Save output as text file
         results_file_path = os.path.join(self.log_path, self.current_test_name)
         with open(results_file_path, 'w') as results_file:
-            json.dump(rssi_result, results_file, indent=4)
+            json.dump(wputils.serialize_dict(rssi_result),
+                      results_file,
+                      indent=4)
         # Compile results into arrays of RSSIs suitable for plotting
         # yapf: disable
         postprocessed_results = collections.OrderedDict(
@@ -291,9 +301,9 @@
         Args:
             postprocessed_results: compiled arrays of RSSI data.
         """
-        figure = wputils.BokehFigure(self.current_test_name,
-                                     x_label='Attenuation (dB)',
-                                     primary_y_label='RSSI (dBm)')
+        figure = BokehFigure(self.current_test_name,
+                             x_label='Attenuation (dB)',
+                             primary_y_label='RSSI (dBm)')
         figure.add_line(postprocessed_results['total_attenuation'],
                         postprocessed_results['signal_poll_rssi']['mean'],
                         'Signal Poll RSSI',
@@ -329,7 +339,7 @@
             center_curvers: boolean indicating whether to shift curves to align
             them with predicted RSSIs
         """
-        figure = wputils.BokehFigure(
+        figure = BokehFigure(
             self.current_test_name,
             x_label='Time (s)',
             primary_y_label=center_curves * 'Centered' + 'RSSI (dBm)',
@@ -405,10 +415,10 @@
                 cum_prob += prob
                 rssi_dist[rssi_key]['rssi_cdf'].append(cum_prob)
 
-        figure = wputils.BokehFigure(self.current_test_name,
-                                     x_label='RSSI (dBm)',
-                                     primary_y_label='p(RSSI = x)',
-                                     secondary_y_label='p(RSSI <= x)')
+        figure = BokehFigure(self.current_test_name,
+                             x_label='RSSI (dBm)',
+                             primary_y_label='p(RSSI = x)',
+                             secondary_y_label='p(RSSI <= x)')
         for rssi_key, rssi_data in rssi_dist.items():
             figure.add_line(x_data=rssi_data['rssi_values'],
                             y_data=rssi_data['rssi_pdf'],
@@ -522,12 +532,18 @@
         Args:
             testcase_params: dict containing test-specific parameters
         """
-        if '2G' in testcase_params['band']:
-            frequency = wutils.WifiEnums.channel_2G_to_freq[
-                testcase_params['channel']]
+        band = self.access_point.band_lookup_by_channel(
+            testcase_params['channel'])
+        if '6G' in band:
+            frequency = wutils.WifiEnums.channel_6G_to_freq[int(
+                testcase_params['channel'].strip('6g'))]
         else:
-            frequency = wutils.WifiEnums.channel_5G_to_freq[
-                testcase_params['channel']]
+            if testcase_params['channel'] < 13:
+                frequency = wutils.WifiEnums.channel_2G_to_freq[
+                    testcase_params['channel']]
+            else:
+                frequency = wutils.WifiEnums.channel_5G_to_freq[
+                    testcase_params['channel']]
         if frequency in wutils.WifiEnums.DFS_5G_FREQUENCIES:
             self.access_point.set_region(self.testbed_params['DFS_region'])
         else:
@@ -541,11 +557,13 @@
 
     def setup_dut(self, testcase_params):
         """Sets up the DUT in the configuration required by the test."""
-        # Check battery level before test
-        if not wputils.health_check(self.dut, 10):
-            asserts.skip('Battery level too low. Skipping test.')
-        # Turn screen off to preserve battery
+        # Keep screen on if requested; otherwise sleep to preserve battery
-        self.dut.go_to_sleep()
+        if self.testbed_params.get('screen_on',
+                                   False) or self.testclass_params.get(
+                                       'screen_on', False):
+            self.dut.droid.wakeLockAcquireDim()
+        else:
+            self.dut.go_to_sleep()
         if wputils.validate_network(self.dut,
                                     testcase_params['test_network']['SSID']):
             self.log.info('Already connected to desired network')
@@ -556,6 +574,8 @@
                 'channel'] = testcase_params['channel']
             wutils.set_wifi_country_code(self.dut,
                                          self.testclass_params['country_code'])
+            if self.testbed_params.get('txbf_off', False):
+                wputils.disable_beamforming(self.dut)
             wutils.wifi_connect(self.dut,
                                 self.main_network[testcase_params['band']],
                                 num_of_tries=5)
@@ -603,6 +623,11 @@
         Args:
             testcase_params: dict containing test-specific parameters
         """
+        # Check if test should be skipped.
+        wputils.check_skip_conditions(testcase_params, self.dut,
+                                      self.access_point,
+                                      getattr(self, 'ota_chamber', None))
+
         testcase_params.update(
             connected_measurements=self.
             testclass_params['rssi_vs_atten_connected_measurements'],
@@ -621,14 +646,11 @@
                 'BSSID', '00:00:00:00')
         ]
 
-        num_atten_steps = int((self.testclass_params['rssi_vs_atten_stop'] -
-                               self.testclass_params['rssi_vs_atten_start']) /
-                              self.testclass_params['rssi_vs_atten_step'])
-        testcase_params['rssi_atten_range'] = [
-            self.testclass_params['rssi_vs_atten_start'] +
-            x * self.testclass_params['rssi_vs_atten_step']
-            for x in range(0, num_atten_steps)
-        ]
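+        # numpy.arange excludes the stop value, matching the step-count
+        # sweep that this replaces.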
+        testcase_params['rssi_atten_range'] = numpy.arange(
+            self.testclass_params['rssi_vs_atten_start'],
+            self.testclass_params['rssi_vs_atten_stop'],
+            self.testclass_params['rssi_vs_atten_step']).tolist()
+
         testcase_params['traffic_timeout'] = self.get_traffic_timeout(
             testcase_params)
 
@@ -646,6 +668,10 @@
         Args:
             testcase_params: dict containing test-specific parameters
         """
+        # Check if test should be skipped.
+        wputils.check_skip_conditions(testcase_params, self.dut,
+                                      self.access_point,
+                                      getattr(self, 'ota_chamber', None))
         testcase_params.update(
             connected_measurements=int(
                 self.testclass_params['rssi_stability_duration'] /
@@ -678,6 +704,11 @@
         Args:
             testcase_params: dict containing test-specific parameters
         """
+        # Check if test should be skipped.
+        wputils.check_skip_conditions(testcase_params, self.dut,
+                                      self.access_point,
+                                      getattr(self, 'ota_chamber', None))
+
         testcase_params.update(connected_measurements=int(
             1 / self.testclass_params['polling_frequency']),
                                scan_measurements=0,
@@ -787,11 +818,11 @@
         allowed_configs = {
             20: [
                 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 36, 40, 44, 48, 64, 100,
-                116, 132, 140, 149, 153, 157, 161
+                116, 132, 140, 149, 153, 157, 161, '6g37', '6g117', '6g213'
             ],
-            40: [36, 44, 100, 149, 157],
-            80: [36, 100, 149],
-            160: [36]
+            40: [36, 44, 100, 149, 157, '6g37', '6g117', '6g213'],
+            80: [36, 100, 149, '6g37', '6g117', '6g213'],
+            160: [36, '6g37', '6g117', '6g213']
         }
 
         for channel, mode, traffic_mode, test_type in itertools.product(
@@ -835,9 +866,10 @@
     def __init__(self, controllers):
         super().__init__(controllers)
         self.tests = self.generate_test_cases(
-            ['test_rssi_stability', 'test_rssi_vs_atten'],
-            [1, 6, 11, 36, 40, 44, 48, 149, 153, 157, 161],
-            ['bw20', 'bw40', 'bw80'], ['ActiveTraffic'])
+            ['test_rssi_stability', 'test_rssi_vs_atten'], [
+                1, 6, 11, 36, 40, 44, 48, 149, 153, 157, 161, '6g37', '6g117',
+                '6g213'
+            ], ['bw20', 'bw40', 'bw80', 'bw160'], ['ActiveTraffic'])
 
 
 class WifiRssi_SampleChannels_NoTraffic_Test(WifiRssiTest):
@@ -879,6 +911,7 @@
             self.user_params['OTAChamber'])[0]
 
     def teardown_class(self):
+        WifiRssiTest.teardown_class(self)
         self.ota_chamber.reset_chamber()
         self.process_testclass_results()
 
@@ -937,7 +970,7 @@
             return
         plots = []
         for channel, channel_data in testclass_data.items():
-            current_plot = wputils.BokehFigure(
+            current_plot = BokehFigure(
                 title='Channel {} - Rssi vs. Position'.format(channel),
                 x_label=x_label,
                 primary_y_label='RSSI (dBm)',
@@ -950,7 +983,7 @@
             plots.append(current_plot)
         current_context = context.get_current_context().get_full_output_path()
         plot_file_path = os.path.join(current_context, 'results.html')
-        wputils.BokehFigure.save_figures(plots, plot_file_path)
+        BokehFigure.save_figures(plots, plot_file_path)
 
     def setup_rssi_test(self, testcase_params):
         # Test setup
@@ -966,22 +999,36 @@
         Args:
             testcase_params: dict containing test-specific parameters
         """
+        # Check if test should be skipped.
+        wputils.check_skip_conditions(testcase_params, self.dut,
+                                      self.access_point,
+                                      getattr(self, 'ota_chamber', None))
+
         if 'rssi_over_orientation' in self.test_name:
             rssi_test_duration = self.testclass_params[
                 'rssi_over_orientation_duration']
+            rssi_ota_test_attenuation = [
+                self.testclass_params['rssi_ota_test_attenuation']
+            ]
         elif 'rssi_variation' in self.test_name:
             rssi_test_duration = self.testclass_params[
                 'rssi_variation_duration']
-
-        testcase_params.update(
-            connected_measurements=int(
-                rssi_test_duration /
-                self.testclass_params['polling_frequency']),
-            scan_measurements=0,
-            first_measurement_delay=MED_SLEEP,
-            rssi_atten_range=[
+            rssi_ota_test_attenuation = [
                 self.testclass_params['rssi_ota_test_attenuation']
-            ])
+            ]
+        elif 'rssi_vs_atten' in self.test_name:
+            rssi_test_duration = self.testclass_params[
+                'rssi_over_orientation_duration']
+            rssi_ota_test_attenuation = numpy.arange(
+                self.testclass_params['rssi_vs_atten_start'],
+                self.testclass_params['rssi_vs_atten_stop'],
+                self.testclass_params['rssi_vs_atten_step']).tolist()
+
+        testcase_params.update(connected_measurements=int(
+            rssi_test_duration / self.testclass_params['polling_frequency']),
+                               scan_measurements=0,
+                               first_measurement_delay=MED_SLEEP,
+                               rssi_atten_range=rssi_ota_test_attenuation)
         testcase_params['band'] = self.access_point.band_lookup_by_channel(
             testcase_params['channel'])
         testcase_params['test_network'] = self.main_network[
@@ -1011,7 +1058,10 @@
         self.testclass_results.append(rssi_result)
         self.plot_rssi_vs_time(rssi_result,
                                rssi_result['postprocessed_results'], 1)
-        self.plot_rssi_distribution(rssi_result['postprocessed_results'])
+        if 'rssi_vs_atten' in self.test_name:
+            self.plot_rssi_vs_attenuation(rssi_result['postprocessed_results'])
+        elif 'rssi_variation' in self.test_name:
+            self.plot_rssi_distribution(rssi_result['postprocessed_results'])
 
     def generate_test_cases(self, test_types, channels, modes, traffic_modes,
                             chamber_modes, orientations):
@@ -1019,11 +1069,11 @@
         allowed_configs = {
             20: [
                 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 36, 40, 44, 48, 64, 100,
-                116, 132, 140, 149, 153, 157, 161
+                116, 132, 140, 149, 153, 157, 161, '6g37', '6g117', '6g213'
             ],
-            40: [36, 44, 100, 149, 157],
-            80: [36, 100, 149],
-            160: [36]
+            40: [36, 44, 100, 149, 157, '6g37', '6g117', '6g213'],
+            80: [36, 100, 149, '6g37', '6g117', '6g213'],
+            160: [36, '6g37', '6g117', '6g213']
         }
 
         for (channel, mode, traffic, chamber_mode, orientation,
@@ -1053,7 +1103,7 @@
     def __init__(self, controllers):
         super().__init__(controllers)
         self.tests = self.generate_test_cases(['test_rssi_vs_atten'],
-                                              [6, 36, 149], ['bw20'],
+                                              [6, 36, 149, '6g37'], ['bw20'],
                                               ['ActiveTraffic'],
                                               ['orientation'],
                                               list(range(0, 360, 45)))
@@ -1063,7 +1113,7 @@
     def __init__(self, controllers):
         WifiRssiTest.__init__(self, controllers)
         self.tests = self.generate_test_cases(['test_rssi_variation'],
-                                              [6, 36, 149], ['bw20'],
+                                              [6, 36, 149, '6g37'], ['bw20'],
                                               ['ActiveTraffic'],
                                               ['StirrersOn'], [0])
 
@@ -1072,7 +1122,7 @@
     def __init__(self, controllers):
         WifiRssiTest.__init__(self, controllers)
         self.tests = self.generate_test_cases(['test_rssi_over_orientation'],
-                                              [6, 36, 149], ['bw20'],
+                                              [6, 36, 149, '6g37'], ['bw20'],
                                               ['ActiveTraffic'],
                                               ['orientation'],
                                               list(range(0, 360, 10)))
diff --git a/acts_tests/tests/google/wifi/WifiRvrTest.py b/acts_tests/tests/google/wifi/WifiRvrTest.py
index 5096f93..ffa52d5 100644
--- a/acts_tests/tests/google/wifi/WifiRvrTest.py
+++ b/acts_tests/tests/google/wifi/WifiRvrTest.py
@@ -30,6 +30,7 @@
 from acts_contrib.test_utils.wifi import ota_chamber
 from acts_contrib.test_utils.wifi import ota_sniffer
 from acts_contrib.test_utils.wifi import wifi_performance_test_utils as wputils
+from acts_contrib.test_utils.wifi.wifi_performance_test_utils.bokeh_figure import BokehFigure
 from acts_contrib.test_utils.wifi import wifi_retail_ap as retail_ap
 from acts_contrib.test_utils.wifi import wifi_test_utils as wutils
 from functools import partial
@@ -62,6 +63,7 @@
         This function initializes hardwares and compiles parameters that are
         common to all tests in this class.
         """
+        self.sta_dut = self.android_devices[0]
         req_params = [
             'RetailAccessPoints', 'rvr_test_params', 'testbed_params',
             'RemoteServer', 'main_network'
@@ -77,7 +79,11 @@
         self.access_point = retail_ap.create(self.RetailAccessPoints)[0]
         if hasattr(self,
                    'OTASniffer') and self.testbed_params['sniffer_enable']:
-            self.sniffer = ota_sniffer.create(self.OTASniffer)[0]
+            try:
+                self.sniffer = ota_sniffer.create(self.OTASniffer)[0]
+            except Exception:
+                self.log.warning('Could not start sniffer. Disabling sniffs.')
+                self.testbed_params['sniffer_enable'] = 0
         self.log.info('Access Point Configuration: {}'.format(
             self.access_point.ap_settings))
         self.log_path = os.path.join(logging.log_path, 'results')
@@ -100,15 +106,18 @@
                 self.log.info('Turning on airplane mode.')
                 asserts.assert_true(utils.force_airplane_mode(dev, True),
                                     'Can not turn on airplane mode.')
-        wutils.wifi_toggle_state(dev, True)
+                wutils.reset_wifi(dev)
+                wutils.wifi_toggle_state(dev, True)
 
     def teardown_test(self):
         self.iperf_server.stop()
 
     def teardown_class(self):
         # Turn WiFi OFF
+        self.access_point.teardown()
         for dev in self.android_devices:
             wutils.wifi_toggle_state(dev, False)
+            dev.go_to_sleep()
         self.process_testclass_results()
 
     def process_testclass_results(self):
@@ -119,7 +128,7 @@
             plot_id = (result['testcase_params']['channel'],
                        result['testcase_params']['mode'])
             if plot_id not in plots:
-                plots[plot_id] = wputils.BokehFigure(
+                plots[plot_id] = BokehFigure(
                     title='Channel {} {} ({})'.format(
                         result['testcase_params']['channel'],
                         result['testcase_params']['mode'],
@@ -129,13 +138,20 @@
             plots[plot_id].add_line(result['total_attenuation'],
                                     result['throughput_receive'],
                                     result['test_name'],
+                                    hover_text=result['hover_text'],
                                     marker='circle')
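+            # Overlay the average PHY rate on the same throughput plot.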
+            plots[plot_id].add_line(result['total_attenuation'],
+                                    result['avg_phy_rate'],
+                                    result['test_name'] + ' (PHY)',
+                                    hover_text=result['hover_text'],
+                                    marker='circle')
+
         figure_list = []
         for plot_id, plot in plots.items():
             plot.generate_figure()
             figure_list.append(plot)
         output_file_path = os.path.join(self.log_path, 'results.html')
-        wputils.BokehFigure.save_figures(figure_list, output_file_path)
+        BokehFigure.save_figures(figure_list, output_file_path)
 
     def pass_fail_check(self, rvr_result):
         """Check the test result and decide if it passed or failed.
@@ -242,19 +258,20 @@
             data
         """
         # Save output as text file
-        test_name = self.current_test_name
         results_file_path = os.path.join(
             self.log_path, '{}.json'.format(self.current_test_name))
         with open(results_file_path, 'w') as results_file:
-            json.dump(rvr_result, results_file, indent=4)
+            json.dump(wputils.serialize_dict(rvr_result),
+                      results_file,
+                      indent=4)
         # Plot and save
-        figure = wputils.BokehFigure(title=test_name,
-                                     x_label='Attenuation (dB)',
-                                     primary_y_label='Throughput (Mbps)')
+        figure = BokehFigure(title=self.current_test_name,
+                             x_label='Attenuation (dB)',
+                             primary_y_label='Throughput (Mbps)')
         try:
             golden_path = next(file_name
                                for file_name in self.golden_files_list
-                               if test_name in file_name)
+                               if self.current_test_name in file_name)
             with open(golden_path, 'r') as golden_file:
                 golden_results = json.load(golden_file)
             golden_attenuation = [
@@ -277,23 +294,52 @@
             self.log.warning('ValueError: Golden file not found')
 
-        # Generate graph annotatios
+        # Generate graph annotations
-        hover_text = [
-            'TX MCS = {0} ({1:.1f}%). RX MCS = {2} ({3:.1f}%)'.format(
-                curr_llstats['summary']['common_tx_mcs'],
-                curr_llstats['summary']['common_tx_mcs_freq'] * 100,
-                curr_llstats['summary']['common_rx_mcs'],
-                curr_llstats['summary']['common_rx_mcs_freq'] * 100)
-            for curr_llstats in rvr_result['llstats']
-        ]
+        rvr_result['hover_text'] = {
+            'llstats': [
+                'TX MCS = {0} ({1:.1f}%). RX MCS = {2} ({3:.1f}%)'.format(
+                    curr_llstats['summary']['common_tx_mcs'],
+                    curr_llstats['summary']['common_tx_mcs_freq'] * 100,
+                    curr_llstats['summary']['common_rx_mcs'],
+                    curr_llstats['summary']['common_rx_mcs_freq'] * 100)
+                for curr_llstats in rvr_result['llstats']
+            ],
+            'rssi': [
+                '{0:.2f} [{1:.2f},{2:.2f}]'.format(
+                    rssi['signal_poll_rssi'],
+                    rssi['chain_0_rssi'],
+                    rssi['chain_1_rssi'],
+                ) for rssi in rvr_result['rssi']
+            ]
+        }
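+        # Report RX PHY rate for downlink tests and TX PHY rate for uplink.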
+        if 'DL' in self.current_test_name:
+            rvr_result['avg_phy_rate'] = [
+                curr_llstats['summary'].get('mean_rx_phy_rate', 0)
+                for curr_llstats in rvr_result['llstats']
+            ]
+        else:
+            rvr_result['avg_phy_rate'] = [
+                curr_llstats['summary'].get('mean_tx_phy_rate', 0)
+                for curr_llstats in rvr_result['llstats']
+            ]
         figure.add_line(rvr_result['total_attenuation'],
                         rvr_result['throughput_receive'],
-                        'Test Results',
-                        hover_text=hover_text,
+                        'Measured Throughput',
+                        hover_text=rvr_result['hover_text'],
                         color='red',
                         marker='circle')
+        rvr_result['avg_phy_rate'].extend(
+            [0] * (len(rvr_result['total_attenuation']) -
+                   len(rvr_result['avg_phy_rate'])))
+        figure.add_line(rvr_result['total_attenuation'],
+                        rvr_result['avg_phy_rate'],
+                        'Average PHY Rate',
+                        hover_text=rvr_result['hover_text'],
+                        color='red',
+                        style='dashed',
+                        marker='square')
 
-        output_file_path = os.path.join(self.log_path,
-                                        '{}.html'.format(test_name))
+        output_file_path = os.path.join(
+            self.log_path, '{}.html'.format(self.current_test_name))
         figure.generate_figure(output_file_path)
 
     def compute_test_metrics(self, rvr_result):
@@ -380,7 +426,7 @@
             if self.testbed_params['sniffer_enable']:
                 self.sniffer.start_capture(
                     network=testcase_params['test_network'],
-                    chan=int(testcase_params['channel']),
+                    chan=testcase_params['channel'],
                     bw=testcase_params['bandwidth'],
                     duration=self.testclass_params['iperf_duration'] / 5)
             # Start iperf session
@@ -439,9 +485,7 @@
                      atten, curr_throughput, current_rssi['signal_poll_rssi'],
                      current_rssi['chain_0_rssi'],
                      current_rssi['chain_1_rssi']))
-            if curr_throughput == 0 and (
-                    current_rssi['signal_poll_rssi'] < -80
-                    or numpy.isnan(current_rssi['signal_poll_rssi'])):
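+            # Count any zero-throughput measurement toward the consecutive
+            # zero counter, regardless of the reported RSSI.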
+            if curr_throughput == 0:
                 zero_counter = zero_counter + 1
             else:
                 zero_counter = 0
@@ -477,20 +521,25 @@
         Args:
             testcase_params: dict containing AP and other test params
         """
-        if '2G' in testcase_params['band']:
-            frequency = wutils.WifiEnums.channel_2G_to_freq[
-                testcase_params['channel']]
+        band = self.access_point.band_lookup_by_channel(
+            testcase_params['channel'])
+        if '6G' in band:
+            frequency = wutils.WifiEnums.channel_6G_to_freq[int(
+                testcase_params['channel'].strip('6g'))]
         else:
-            frequency = wutils.WifiEnums.channel_5G_to_freq[
-                testcase_params['channel']]
+            if testcase_params['channel'] < 13:
+                frequency = wutils.WifiEnums.channel_2G_to_freq[
+                    testcase_params['channel']]
+            else:
+                frequency = wutils.WifiEnums.channel_5G_to_freq[
+                    testcase_params['channel']]
         if frequency in wutils.WifiEnums.DFS_5G_FREQUENCIES:
             self.access_point.set_region(self.testbed_params['DFS_region'])
         else:
             self.access_point.set_region(self.testbed_params['default_region'])
-        self.access_point.set_channel(testcase_params['band'],
-                                      testcase_params['channel'])
-        self.access_point.set_bandwidth(testcase_params['band'],
-                                        testcase_params['mode'])
+        self.access_point.set_channel_and_bandwidth(testcase_params['band'],
+                                                    testcase_params['channel'],
+                                                    testcase_params['mode'])
         self.log.info('Access Point Configuration: {}'.format(
             self.access_point.ap_settings))
 
@@ -500,24 +549,25 @@
         Args:
             testcase_params: dict containing AP and other test params
         """
-        self.sta_dut = self.android_devices[0]
-        # Check battery level before test
-        if not wputils.health_check(
-                self.sta_dut,
-                20) and testcase_params['traffic_direction'] == 'UL':
-            asserts.skip('Overheating or Battery level low. Skipping test.')
-        # Turn screen off to preserve battery
+        # Keep screen on if requested; otherwise sleep to preserve battery
-        self.sta_dut.go_to_sleep()
+        if self.testbed_params.get('screen_on',
+                                   False) or self.testclass_params.get(
+                                       'screen_on', False):
+            self.sta_dut.droid.wakeLockAcquireDim()
+        else:
+            self.sta_dut.go_to_sleep()
         if wputils.validate_network(self.sta_dut,
                                     testcase_params['test_network']['SSID']):
             self.log.info('Already connected to desired network')
         else:
-            wutils.wifi_toggle_state(self.dut, False)
-            wutils.set_wifi_country_code(self.dut,
+            wutils.wifi_toggle_state(self.sta_dut, False)
+            wutils.set_wifi_country_code(self.sta_dut,
                                          self.testclass_params['country_code'])
-            wutils.wifi_toggle_state(self.dut, True)
-            wutils.reset_wifi(self.dut)
-            wutils.set_wifi_country_code(self.dut,
+            wutils.wifi_toggle_state(self.sta_dut, True)
+            wutils.reset_wifi(self.sta_dut)
+            if self.testbed_params.get('txbf_off', False):
+                wputils.disable_beamforming(self.sta_dut)
+            wutils.set_wifi_country_code(self.sta_dut,
                                          self.testclass_params['country_code'])
             if self.testbed_params['sniffer_enable']:
                 self.sniffer.start_capture(
@@ -530,6 +580,8 @@
                                     testcase_params['test_network'],
                                     num_of_tries=5,
                                     check_connectivity=True)
+                if self.testclass_params.get('num_streams', 2) == 1:
+                    wputils.set_nss_capability(self.sta_dut, 1)
             finally:
                 if self.testbed_params['sniffer_enable']:
                     self.sniffer.stop_capture(tag='connection_setup')
@@ -569,7 +621,7 @@
                         self.remote_server, sta_dut_ip, 'public')
         # Set DUT to monitor RSSI and LLStats on
         self.monitored_dut = self.sta_dut
-        self.monitored_interface = None
+        self.monitored_interface = 'wlan0'
 
     def compile_test_params(self, testcase_params):
         """Function that completes all test params based on the test name.
@@ -577,12 +629,18 @@
         Args:
             testcase_params: dict containing test-specific parameters
         """
-        num_atten_steps = int((self.testclass_params['atten_stop'] -
-                               self.testclass_params['atten_start']) /
-                              self.testclass_params['atten_step'])
+        # Check if test should be skipped based on parameters.
+        wputils.check_skip_conditions(testcase_params, self.sta_dut,
+                                      self.access_point,
+                                      getattr(self, 'ota_chamber', None))
+
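+        # atten_start is a mapping keyed by band; default to 0 dB when the
+        # channel's band is not listed.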
+        band = wputils.CHANNEL_TO_BAND_MAP[testcase_params['channel']]
+        start_atten = self.testclass_params['atten_start'].get(band, 0)
+        num_atten_steps = int(
+            (self.testclass_params['atten_stop'] - start_atten) /
+            self.testclass_params['atten_step'])
         testcase_params['atten_range'] = [
-            self.testclass_params['atten_start'] +
-            x * self.testclass_params['atten_step']
+            start_atten + x * self.testclass_params['atten_step']
             for x in range(0, num_atten_steps)
         ]
         band = self.access_point.band_lookup_by_channel(
@@ -649,11 +707,11 @@
         allowed_configs = {
             20: [
                 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 36, 40, 44, 48, 64, 100,
-                116, 132, 140, 149, 153, 157, 161
+                116, 132, 140, 149, 153, 157, 161, '6g37', '6g117', '6g213'
             ],
-            40: [36, 44, 100, 149, 157],
-            80: [36, 100, 149],
-            160: [36]
+            40: [36, 44, 100, 149, 157, '6g37', '6g117', '6g213'],
+            80: [36, 100, 149, '6g37', '6g117', '6g213'],
+            160: [36, '6g37', '6g117', '6g213']
         }
 
         for channel, mode, traffic_type, traffic_direction in itertools.product(
@@ -678,7 +736,10 @@
     def __init__(self, controllers):
         super().__init__(controllers)
         self.tests = self.generate_test_cases(
-            channels=[1, 6, 11, 36, 40, 44, 48, 149, 153, 157, 161],
+            channels=[
+                1, 6, 11, 36, 40, 44, 48, 149, 153, 157, 161, '6g37', '6g117',
+                '6g213'
+            ],
             modes=['bw20', 'bw40', 'bw80', 'bw160'],
             traffic_types=['TCP'],
             traffic_directions=['DL', 'UL'])
@@ -698,7 +759,10 @@
     def __init__(self, controllers):
         super().__init__(controllers)
         self.tests = self.generate_test_cases(
-            channels=[1, 6, 11, 36, 40, 44, 48, 149, 153, 157, 161],
+            channels=[
+                1, 6, 11, 36, 40, 44, 48, 149, 153, 157, 161, '6g37', '6g117',
+                '6g213'
+            ],
             modes=['HE20', 'HE40', 'HE80', 'HE160'],
             traffic_types=['TCP'],
             traffic_directions=['DL', 'UL'])
@@ -708,7 +772,7 @@
     def __init__(self, controllers):
         super().__init__(controllers)
         self.tests = self.generate_test_cases(
-            channels=[6, 36, 149],
+            channels=[6, 36, 149, '6g37'],
             modes=['bw20', 'bw40', 'bw80', 'bw160'],
             traffic_types=['UDP'],
             traffic_directions=['DL', 'UL'])
@@ -729,7 +793,7 @@
         super().__init__(controllers)
         self.tests = self.generate_test_cases(
-            channels=[6, 36, 149],
+            channels=[6, 36, 149, '6g37'],
             modes=['HE20', 'HE40', 'HE80', 'HE160'],
             traffic_types=['UDP'],
             traffic_directions=['DL', 'UL'])
 
@@ -744,6 +808,57 @@
             traffic_directions=['DL', 'UL'])
 
 
+class WifiRvr_SingleChain_TCP_Test(WifiRvrTest):
+    def __init__(self, controllers):
+        super().__init__(controllers)
+        self.tests = self.generate_test_cases(
+            channels=[
+                1, 6, 11, 36, 40, 44, 48, 149, 153, 157, 161, '6g37', '6g117',
+                '6g213'
+            ],
+            modes=['bw20', 'bw40', 'bw80', 'bw160'],
+            traffic_types=['TCP'],
+            traffic_directions=['DL', 'UL'],
+            chains=[0, 1, '2x2'])
+
+    def setup_dut(self, testcase_params):
+        self.sta_dut = self.android_devices[0]
+        wputils.set_chain_mask(self.sta_dut, testcase_params['chain'])
+        WifiRvrTest.setup_dut(self, testcase_params)
+
+    def generate_test_cases(self, channels, modes, traffic_types,
+                            traffic_directions, chains):
+        """Function that auto-generates test cases for a test class."""
+        test_cases = []
+        allowed_configs = {
+            20: [
+                1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 36, 40, 44, 48, 64, 100,
+                116, 132, 140, 149, 153, 157, 161, '6g37', '6g117', '6g213'
+            ],
+            40: [36, 44, 100, 149, 157, '6g37', '6g117', '6g213'],
+            80: [36, 100, 149, '6g37', '6g117', '6g213'],
+            160: [36, '6g37', '6g117', '6g213']
+        }
+
+        for channel, mode, chain, traffic_type, traffic_direction in itertools.product(
+                channels, modes, chains, traffic_types, traffic_directions):
+            bandwidth = int(''.join([x for x in mode if x.isdigit()]))
+            if channel not in allowed_configs[bandwidth]:
+                continue
+            test_name = 'test_rvr_{}_{}_ch{}_{}_ch{}'.format(
+                traffic_type, traffic_direction, channel, mode, chain)
+            test_params = collections.OrderedDict(
+                channel=channel,
+                mode=mode,
+                bandwidth=bandwidth,
+                traffic_type=traffic_type,
+                traffic_direction=traffic_direction,
+                chain=chain)
+            setattr(self, test_name, partial(self._test_rvr, test_params))
+            test_cases.append(test_name)
+        return test_cases
+
+
 # Over-the air version of RVR tests
 class WifiOtaRvrTest(WifiRvrTest):
     """Class to test over-the-air RvR
@@ -771,7 +886,7 @@
 
     def extract_test_id(self, testcase_params, id_fields):
         test_id = collections.OrderedDict(
-            (param, testcase_params[param]) for param in id_fields)
+            (param, testcase_params.get(param, None)) for param in id_fields)
         return test_id
 
     def process_testclass_results(self):
@@ -781,10 +896,10 @@
         compiled_data = collections.OrderedDict()
         for result in self.testclass_results:
             test_id = tuple(
-                self.extract_test_id(
-                    result['testcase_params'],
-                    ['channel', 'mode', 'traffic_type', 'traffic_direction'
-                     ]).items())
+                self.extract_test_id(result['testcase_params'], [
+                    'channel', 'mode', 'traffic_type', 'traffic_direction',
+                    'chain'
+                ]).items())
             if test_id not in plots:
                 # Initialize test id data when not present
                 compiled_data[test_id] = {'throughput': [], 'metrics': {}}
@@ -792,7 +907,7 @@
                     key: []
                     for key in result['metrics'].keys()
                 }
-                plots[test_id] = wputils.BokehFigure(
+                plots[test_id] = BokehFigure(
                     title='Channel {} {} ({} {})'.format(
                         result['testcase_params']['channel'],
                         result['testcase_params']['mode'],
@@ -800,6 +915,15 @@
                         result['testcase_params']['traffic_direction']),
                     x_label='Attenuation (dB)',
                     primary_y_label='Throughput (Mbps)')
+                test_id_phy = test_id + ('PHY',)
+                plots[test_id_phy] = BokehFigure(
+                    title='Channel {} {} ({} {}) (PHY Rate)'.format(
+                        result['testcase_params']['channel'],
+                        result['testcase_params']['mode'],
+                        result['testcase_params']['traffic_type'],
+                        result['testcase_params']['traffic_direction']),
+                    x_label='Attenuation (dB)',
+                    primary_y_label='PHY Rate (Mbps)')
             # Compile test id data and metrics
             compiled_data[test_id]['throughput'].append(
                 result['throughput_receive'])
@@ -812,11 +936,19 @@
             plots[test_id].add_line(result['total_attenuation'],
                                     result['throughput_receive'],
                                     result['test_name'],
+                                    hover_text=result['hover_text'],
                                     width=1,
                                     style='dashed',
                                     marker='circle')
+            # test_id_phy is only assigned when a new plot is created above,
+            # so recompute it here to keep the PHY trace on the plot that
+            # matches the current test_id.
+            test_id_phy = test_id + ('PHY',)
+            plots[test_id_phy].add_line(result['total_attenuation'],
+                                        result['avg_phy_rate'],
+                                        result['test_name'] + ' PHY',
+                                        hover_text=result['hover_text'],
+                                        width=1,
+                                        style='dashed',
+                                        marker='circle')
 
-        # Compute average RvRs and compount metrics over orientations
+        # Compute average RvRs and compute metrics over orientations
         for test_id, test_data in compiled_data.items():
             test_id_dict = dict(test_id)
             metric_tag = '{}_{}_ch{}_{}'.format(
@@ -844,17 +976,17 @@
                                     marker='square')
 
         figure_list = []
-        for test_id, plot in plots.items():
+        for plot_id, plot in plots.items():
             plot.generate_figure()
             figure_list.append(plot)
         output_file_path = os.path.join(self.log_path, 'results.html')
-        wputils.BokehFigure.save_figures(figure_list, output_file_path)
+        BokehFigure.save_figures(figure_list, output_file_path)
 
     def setup_rvr_test(self, testcase_params):
-        # Set turntable orientation
-        self.ota_chamber.set_orientation(testcase_params['orientation'])
         # Continue test setup
         WifiRvrTest.setup_rvr_test(self, testcase_params)
+        # Set turntable orientation
+        self.ota_chamber.set_orientation(testcase_params['orientation'])
 
     def generate_test_cases(self, channels, modes, angles, traffic_types,
                             directions):
@@ -866,7 +998,7 @@
             ],
             40: [36, 44, 100, 149, 157],
             80: [36, 100, 149],
-            160: [36]
+            160: [36, '6g37', '6g117', '6g213']
         }
         for channel, mode, angle, traffic_type, direction in itertools.product(
                 channels, modes, angles, traffic_types, directions):
@@ -890,8 +1022,9 @@
     def __init__(self, controllers):
         WifiOtaRvrTest.__init__(self, controllers)
         self.tests = self.generate_test_cases(
-            [1, 6, 11, 36, 40, 44, 48, 149, 153, 157, 161],
-            ['bw20', 'bw40', 'bw80'], list(range(0, 360, 45)), ['TCP'], ['DL'])
+            [1, 6, 11, 36, 40, 44, 48, 149, 153, 157, 161, '6g37'],
+            ['bw20', 'bw40', 'bw80', 'bw160'], list(range(0, 360, 45)),
+            ['TCP'], ['DL', 'UL'])
 
 
 class WifiOtaRvr_SampleChannel_Test(WifiOtaRvrTest):
@@ -901,7 +1034,10 @@
                                               list(range(0, 360, 45)), ['TCP'],
                                               ['DL'])
         self.tests.extend(
-            self.generate_test_cases([36, 149], ['bw80'],
+            self.generate_test_cases([36, 149], ['bw80', 'bw160'],
+                                     list(range(0, 360, 45)), ['TCP'], ['DL']))
+        self.tests.extend(
+            self.generate_test_cases(['6g37'], ['bw160'],
                                      list(range(0, 360, 45)), ['TCP'], ['DL']))
 
 
@@ -909,5 +1045,56 @@
     def __init__(self, controllers):
         WifiOtaRvrTest.__init__(self, controllers)
         self.tests = self.generate_test_cases(
-            [6, 36, 40, 44, 48, 149, 153, 157, 161], ['bw20', 'bw40', 'bw80'],
-            [0], ['TCP'], ['DL', 'UL'])
+            [6, 36, 40, 44, 48, 149, 153, 157, 161, '6g37'],
+            ['bw20', 'bw40', 'bw80', 'bw160'], [0], ['TCP'], ['DL', 'UL'])
+
+
+class WifiOtaRvr_SingleChain_Test(WifiOtaRvrTest):
+    def __init__(self, controllers):
+        WifiOtaRvrTest.__init__(self, controllers)
+        self.tests = self.generate_test_cases([6], ['bw20'],
+                                              list(range(0, 360, 45)), ['TCP'],
+                                              ['DL', 'UL'], [0, 1])
+        self.tests.extend(
+            self.generate_test_cases([36, 149], ['bw20', 'bw80', 'bw160'],
+                                     list(range(0, 360, 45)), ['TCP'],
+                                     ['DL', 'UL'], [0, 1, '2x2']))
+        self.tests.extend(
+            self.generate_test_cases(['6g37'], ['bw20', 'bw80', 'bw160'],
+                                     list(range(0, 360, 45)), ['TCP'],
+                                     ['DL', 'UL'], [0, 1, '2x2']))
+
+    def setup_dut(self, testcase_params):
+        self.sta_dut = self.android_devices[0]
+        wputils.set_chain_mask(self.sta_dut, testcase_params['chain'])
+        WifiRvrTest.setup_dut(self, testcase_params)
+
+    def generate_test_cases(self, channels, modes, angles, traffic_types,
+                            directions, chains):
+        test_cases = []
+        allowed_configs = {
+            20: [
+                1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 36, 40, 44, 48, 64, 100,
+                116, 132, 140, 149, 153, 157, 161
+            ],
+            40: [36, 44, 100, 149, 157],
+            80: [36, 100, 149],
+            160: [36, '6g37', '6g117', '6g213']
+        }
+        for channel, mode, chain, angle, traffic_type, direction in itertools.product(
+                channels, modes, chains, angles, traffic_types, directions):
+            bandwidth = int(''.join([x for x in mode if x.isdigit()]))
+            if channel not in allowed_configs[bandwidth]:
+                continue
+            testcase_name = 'test_rvr_{}_{}_ch{}_{}_ch{}_{}deg'.format(
+                traffic_type, direction, channel, mode, chain, angle)
+            test_params = collections.OrderedDict(channel=channel,
+                                                  mode=mode,
+                                                  bandwidth=bandwidth,
+                                                  chain=chain,
+                                                  traffic_type=traffic_type,
+                                                  traffic_direction=direction,
+                                                  orientation=angle)
+            setattr(self, testcase_name, partial(self._test_rvr, test_params))
+            test_cases.append(testcase_name)
+        return test_cases
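
For readers skimming the patch, the test auto-generation used by the new WifiRvr_SingleChain classes (and by the other generate_test_cases methods touched here) boils down to the pattern below: expand the parameter matrix with itertools.product, drop channel/bandwidth pairs that are not allowed, and bind each parameter dict to a per-test method with functools.partial and setattr. This is a standalone, illustrative sketch; DemoRvrTest and _run_case are placeholder names, not ACTS APIs.

import collections
import itertools
from functools import partial


class DemoRvrTest:
    """Illustrative stand-in for the ACTS test classes above."""

    def __init__(self):
        self.tests = self.generate_test_cases(channels=[6, 36, '6g37'],
                                              modes=['bw20', 'bw80'],
                                              chains=[0, '2x2'])

    def generate_test_cases(self, channels, modes, chains):
        # Only some channel/bandwidth pairs are valid; other combos are skipped.
        allowed_configs = {20: [6, 36, '6g37'], 80: [36, '6g37']}
        test_cases = []
        for channel, mode, chain in itertools.product(channels, modes, chains):
            bandwidth = int(''.join(c for c in mode if c.isdigit()))
            if channel not in allowed_configs[bandwidth]:
                continue
            name = 'test_rvr_ch{}_{}_ch{}'.format(channel, mode, chain)
            params = collections.OrderedDict(channel=channel,
                                             mode=mode,
                                             bandwidth=bandwidth,
                                             chain=chain)
            # Bind params now; the test runner later invokes the attribute.
            setattr(self, name, partial(self._run_case, params))
            test_cases.append(name)
        return test_cases

    def _run_case(self, params):
        print('running', params)


DemoRvrTest().test_rvr_ch36_bw80_ch2x2()   # runs with its own bound params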
diff --git a/acts_tests/tests/google/wifi/WifiSensitivityTest.py b/acts_tests/tests/google/wifi/WifiSensitivityTest.py
index 41aa32c..954bc90 100644
--- a/acts_tests/tests/google/wifi/WifiSensitivityTest.py
+++ b/acts_tests/tests/google/wifi/WifiSensitivityTest.py
@@ -29,8 +29,10 @@
 from acts.metrics.loggers.blackbox import BlackboxMappedMetricLogger
 from acts_contrib.test_utils.wifi import ota_chamber
 from acts_contrib.test_utils.wifi import wifi_performance_test_utils as wputils
+from acts_contrib.test_utils.wifi.wifi_performance_test_utils.bokeh_figure import BokehFigure
 from acts_contrib.test_utils.wifi import wifi_test_utils as wutils
 from acts_contrib.test_utils.wifi import wifi_retail_ap as retail_ap
+from acts_contrib.test_utils.wifi import ota_sniffer
 from functools import partial
 from WifiRvrTest import WifiRvrTest
 from WifiPingTest import WifiPingTest
@@ -46,6 +48,7 @@
     example_connectivity_performance_ap_sta.json.
     """
 
+    MAX_CONSECUTIVE_ZEROS = 5
     RSSI_POLL_INTERVAL = 0.2
     VALID_TEST_CONFIGS = {
         1: ['legacy', 'VHT20'],
@@ -138,16 +141,25 @@
         common to all tests in this class.
         """
         self.dut = self.android_devices[-1]
+        self.sta_dut = self.android_devices[-1]
         req_params = [
             'RetailAccessPoints', 'sensitivity_test_params', 'testbed_params',
             'RemoteServer'
         ]
-        opt_params = ['main_network']
+        opt_params = ['main_network', 'OTASniffer']
         self.unpack_userparams(req_params, opt_params)
         self.testclass_params = self.sensitivity_test_params
         self.num_atten = self.attenuators[0].instrument.num_atten
         self.ping_server = ssh.connection.SshConnection(
             ssh.settings.from_config(self.RemoteServer[0]['ssh_config']))
+        if hasattr(self,
+                   'OTASniffer') and self.testbed_params['sniffer_enable']:
+            try:
+                self.sniffer = ota_sniffer.create(self.OTASniffer)[0]
+            except:
+                self.log.warning('Could not start sniffer. Disabling sniffs.')
+                self.testbed_params['sniffer_enable'] = 0
+        self.remote_server = self.ping_server
         self.iperf_server = self.iperf_servers[0]
         self.iperf_client = self.iperf_clients[0]
         self.access_point = retail_ap.create(self.RetailAccessPoints)[0]
@@ -169,9 +181,11 @@
         self.user_params['retry_tests'] = [self.__class__.__name__]
 
     def teardown_class(self):
+        self.access_point.teardown()
         # Turn WiFi OFF
         for dev in self.android_devices:
             wutils.wifi_toggle_state(dev, False)
+            dev.go_to_sleep()
         self.process_testclass_results()
 
     def setup_test(self):
@@ -207,9 +221,40 @@
         else:
             asserts.explicit_pass('Test Passed. {}'.format(result_string))
 
+    def plot_per_curves(self):
+        """Plots PER curves to help debug sensitivity."""
+
+        plots = collections.OrderedDict()
+        id_fields = ['channel', 'mode', 'num_streams']
+        for result in self.testclass_results:
+            testcase_params = result['testcase_params']
+            plot_id = self.extract_test_id(testcase_params, id_fields)
+            plot_id = tuple(plot_id.items())
+            if plot_id not in plots:
+                plots[plot_id] = BokehFigure(
+                    title='Channel {} {} Nss{}'.format(
+                        result['testcase_params']['channel'],
+                        result['testcase_params']['mode'],
+                        result['testcase_params']['num_streams']),
+                    x_label='Attenuation (dB)',
+                    primary_y_label='PER (%)')
+            per = [stat['summary']['rx_per'] for stat in result['llstats']]
+            if len(per) < len(result['total_attenuation']):
+                per.extend([100] *
+                           (len(result['total_attenuation']) - len(per)))
+            plots[plot_id].add_line(result['total_attenuation'], per,
+                                    result['test_name'])
+        figure_list = []
+        for plot_id, plot in plots.items():
+            plot.generate_figure()
+            figure_list.append(plot)
+        output_file_path = os.path.join(self.log_path, 'results.html')
+        BokehFigure.save_figures(figure_list, output_file_path)
+
     def process_testclass_results(self):
         """Saves and plots test results from all executed test cases."""
         # write json output
+        self.plot_per_curves()
         testclass_results_dict = collections.OrderedDict()
         id_fields = ['mode', 'rate', 'num_streams', 'chain_mask']
         channels_tested = []
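
The new plot_per_curves helper pads short PER series with 100% so every curve covers the full attenuation axis even when a test aborts early. A minimal sketch of that padding, using illustrative values rather than real results:

total_attenuation = [20, 25, 30, 35, 40, 45]
per = [0.0, 1.2, 8.5, 37.0]                  # test stopped after four points
if len(per) < len(total_attenuation):
    per.extend([100] * (len(total_attenuation) - len(per)))
print(per)                                   # [0.0, 1.2, 8.5, 37.0, 100, 100]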
@@ -331,6 +376,7 @@
                 testcase_params['channel'])] - ping_result['range'])
 
     def setup_sensitivity_test(self, testcase_params):
+        # Setup test
         if testcase_params['traffic_type'].lower() == 'ping':
             self.setup_ping_test(testcase_params)
             self.run_sensitivity_test = self.run_ping_test
@@ -386,11 +432,13 @@
         Args:
             testcase_params: dict containing AP and other test params
         """
-        # Check battery level before test
-        if not wputils.health_check(self.dut, 10):
-            asserts.skip('Battery level too low. Skipping test.')
         # Turn screen off to preserve battery
-        self.dut.go_to_sleep()
+        if self.testbed_params.get('screen_on',
+                                   False) or self.testclass_params.get(
+                                       'screen_on', False):
+            self.dut.droid.wakeLockAcquireDim()
+        else:
+            self.dut.go_to_sleep()
         if wputils.validate_network(self.dut,
                                     testcase_params['test_network']['SSID']):
             self.log.info('Already connected to desired network')
@@ -482,8 +530,14 @@
 
     def compile_test_params(self, testcase_params):
         """Function that generates test params based on the test name."""
+        # Check if test should be skipped.
+        wputils.check_skip_conditions(testcase_params, self.dut,
+                                      self.access_point,
+                                      getattr(self, 'ota_chamber', None))
+
         band = self.access_point.band_lookup_by_channel(
             testcase_params['channel'])
+        testcase_params['band'] = band
         testcase_params['test_network'] = self.main_network[band]
         if testcase_params['chain_mask'] in ['0', '1']:
             testcase_params['attenuated_chain'] = 'DUT-Chain-{}'.format(
@@ -675,41 +729,20 @@
             testcase_params: dict containing AP and other test params
         """
         # Configure the right INI settings
-        if testcase_params['chain_mask'] != self.current_chain_mask:
-            self.log.info('Updating WiFi chain mask to: {}'.format(
-                testcase_params['chain_mask']))
-            self.current_chain_mask = testcase_params['chain_mask']
-            if testcase_params['chain_mask'] in ['0', '1']:
-                wputils.set_ini_single_chain_mode(
-                    self.dut, int(testcase_params['chain_mask']))
-            else:
-                wputils.set_ini_two_chain_mode(self.dut)
-        # Check battery level before test
-        if not wputils.health_check(self.dut, 10):
-            asserts.skip('Battery level too low. Skipping test.')
+        wputils.set_chain_mask(self.dut, testcase_params['chain_mask'])
         # Turn screen off to preserve battery
-        self.dut.go_to_sleep()
-        if wputils.validate_network(self.dut,
-                                    testcase_params['test_network']['SSID']):
-            self.log.info('Already connected to desired network')
+        if self.testbed_params.get('screen_on',
+                                   False) or self.testclass_params.get(
+                                       'screen_on', False):
+            self.dut.droid.wakeLockAcquireDim()
         else:
-            wutils.wifi_toggle_state(self.dut, False)
-            wutils.set_wifi_country_code(self.dut,
-                                         self.testclass_params['country_code'])
-            wutils.wifi_toggle_state(self.dut, True)
-            wutils.reset_wifi(self.dut)
-            wutils.set_wifi_country_code(self.dut,
-                                         self.testclass_params['country_code'])
-            testcase_params['test_network']['channel'] = testcase_params[
-                'channel']
-            wutils.wifi_connect(self.dut,
-                                testcase_params['test_network'],
-                                num_of_tries=5,
-                                check_connectivity=True)
+            self.dut.go_to_sleep()
+        self.validate_and_connect(testcase_params)
         self.dut_ip = self.dut.droid.connectivityGetIPv4Addresses('wlan0')[0]
 
     def process_testclass_results(self):
         """Saves and plots test results from all executed test cases."""
+        self.plot_per_curves()
         testclass_results_dict = collections.OrderedDict()
         id_fields = ['channel', 'mode', 'rate']
         plots = []
@@ -752,10 +785,9 @@
                 test_id_str = 'Channel {} - {} MCS{}'.format(
                     test_id_dict['channel'], test_id_dict['mode'],
                     test_id_dict['rate'])
-            curr_plot = wputils.BokehFigure(
-                title=str(test_id_str),
-                x_label='Orientation (deg)',
-                primary_y_label='Sensitivity (dBm)')
+            curr_plot = BokehFigure(title=str(test_id_str),
+                                    x_label='Orientation (deg)',
+                                    primary_y_label='Sensitivity (dBm)')
             for line_id, line_results in test_data.items():
                 curr_plot.add_line(line_results['orientation'],
                                    line_results['sensitivity'],
@@ -784,7 +816,7 @@
             curr_plot.generate_figure(output_file_path)
             plots.append(curr_plot)
         output_file_path = os.path.join(current_context, 'results.html')
-        wputils.BokehFigure.save_figures(plots, output_file_path)
+        BokehFigure.save_figures(plots, output_file_path)
 
     def get_start_atten(self, testcase_params):
         """Gets the starting attenuation for this sensitivity test.
@@ -884,8 +916,10 @@
         requested_channels = [6, 36, 149]
         requested_rates = [
             self.RateTuple(8, 1, 86.7),
+            self.RateTuple(6, 1, 65),
             self.RateTuple(2, 1, 21.7),
             self.RateTuple(8, 2, 173.3),
+            self.RateTuple(6, 2, 130.3),
             self.RateTuple(2, 2, 43.3)
         ]
         self.tests = self.generate_test_cases(requested_channels,
@@ -899,12 +933,16 @@
         WifiOtaSensitivityTest.__init__(self, controllers)
         requested_channels = [6, 36, 149]
         requested_rates = [
+            self.RateTuple(9, 1, 96),
+            self.RateTuple(9, 2, 192),
+            self.RateTuple(6, 1, 65),
+            self.RateTuple(6, 2, 130.3),
             self.RateTuple(2, 1, 21.7),
             self.RateTuple(2, 2, 43.3)
         ]
-        self.tests = self.generate_test_cases(requested_channels, ['VHT20'],
-                                              requested_rates,
-                                              ['0', '1', '2x2'],
+        self.tests = self.generate_test_cases(requested_channels,
+                                              ['VHT20', 'VHT80'],
+                                              requested_rates, [0, 1, '2x2'],
                                               list(range(0, 360, 10)))
 
 
diff --git a/acts_tests/tests/google/wifi/WifiSoftApPerformanceTest.py b/acts_tests/tests/google/wifi/WifiSoftApPerformanceTest.py
index b116666..47719c5 100644
--- a/acts_tests/tests/google/wifi/WifiSoftApPerformanceTest.py
+++ b/acts_tests/tests/google/wifi/WifiSoftApPerformanceTest.py
@@ -64,7 +64,6 @@
         opt_params = ['golden_files_list', 'OTASniffer']
         self.unpack_userparams(req_params, opt_params)
         self.access_points = retail_ap.create(self.RetailAccessPoints)
-        self.access_point = self.access_points[0]
         self.testclass_params = self.sap_test_params
         self.num_atten = self.attenuators[0].instrument.num_atten
         self.iperf_server = ipf.create([{
@@ -81,7 +80,11 @@
         }])[0]
         if hasattr(self,
                    'OTASniffer') and self.testbed_params['sniffer_enable']:
-            self.sniffer = ota_sniffer.create(self.OTASniffer)[0]
+            try:
+                self.sniffer = ota_sniffer.create(self.OTASniffer)[0]
+            except:
+                self.log.warning('Could not start sniffer. Disabling sniffs.')
+                self.testbed_params['sniffer_enable'] = 0
 
         self.log_path = os.path.join(logging.log_path, 'results')
         os.makedirs(self.log_path, exist_ok=True)
@@ -106,9 +109,11 @@
         wutils.stop_wifi_tethering(self.android_devices[0])
         for dev in self.android_devices:
             wutils.wifi_toggle_state(dev, False)
+            dev.go_to_sleep()
         self.process_testclass_results()
         # Teardown AP and release it's lockfile
-        self.access_point.teardown()
+        for ap in self.access_points:
+            ap.teardown()
 
     def teardown_test(self):
         self.iperf_server.stop()
@@ -118,13 +123,17 @@
         info = {}
         info['client_ip_address'] = self.android_devices[
             1].droid.connectivityGetIPv4Addresses('wlan0')[0]
+        ifconfig_out = self.android_devices[0].adb.shell('ifconfig')
+        soft_ap_interface = 'wlan1' if 'wlan1' in ifconfig_out else 'wlan2'
         info['ap_ip_address'] = self.android_devices[
-            0].droid.connectivityGetIPv4Addresses('wlan1')[0]
-        info['frequency'] = self.android_devices[1].adb.shell(
-            'wpa_cli status | grep freq').split('=')[1]
+            0].droid.connectivityGetIPv4Addresses(soft_ap_interface)[0]
+
+        connection_rssi = wputils.get_connected_rssi(self.android_devices[1],
+                                                     interface='wlan0')
+        info['frequency'] = connection_rssi['frequency'][0]
         info['channel'] = wutils.WifiEnums.freq_to_channel[int(
             info['frequency'])]
-        info['mode'] = 'VHT20' if info['channel'] < 13 else 'VHT80'
+        info['mode'] = 'bw20' if info['channel'] < 13 else 'bw80'
         return info
 
     def setup_aps(self, testcase_params):
@@ -180,11 +189,23 @@
                             num_of_tries=5,
                             check_connectivity=False)
         # Compile meta data
-        #self.access_point = AccessPointTuple(sap_config)
         sap_info = self.get_sap_connection_info()
         print("SAP Info: {}".format(sap_info))
         testcase_params['channel'] = sap_info['channel']
+        if testcase_params['channel'] < 13:
+            testcase_params['band'] = '2GHz'
+        else:
+            testcase_params['band'] = '5GHz'
         testcase_params['mode'] = sap_info['mode']
+        self.access_point = AccessPointTuple({
+            testcase_params['band']: {
+                'SSID': sap_config[wutils.WifiEnums.SSID_KEY],
+                'password': sap_config[wutils.WifiEnums.PWD_KEY],
+                'channel': sap_info['channel'],
+                'mode': sap_info['mode'],
+                'bandwidth': sap_info['mode'],
+            }
+        })
         testcase_params['iperf_server_address'] = sap_info['ap_ip_address']
 
     def setup_sap_rvr_test(self, testcase_params):
@@ -204,7 +225,7 @@
         self.setup_sap_connection(testcase_params)
         # Set DUT to monitor RSSI and LLStats on
         self.monitored_dut = self.sta_dut
-        self.monitored_interface = None
+        self.monitored_interface = 'wlan0'
 
     def compile_test_params(self, testcase_params):
         """Function that completes all test params based on the test name.
diff --git a/acts_tests/tests/google/wifi/WifiTdlsRvrTest.py b/acts_tests/tests/google/wifi/WifiTdlsRvrTest.py
new file mode 100644
index 0000000..b051448
--- /dev/null
+++ b/acts_tests/tests/google/wifi/WifiTdlsRvrTest.py
@@ -0,0 +1,365 @@
+#!/usr/bin/env python3.4
+#
+#   Copyright 2020 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the 'License');
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an 'AS IS' BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+import collections
+import itertools
+import logging
+import os
+from acts import asserts
+from acts import base_test
+from acts import utils
+from acts.controllers import iperf_server as ipf
+from acts.controllers import iperf_client as ipc
+from acts.metrics.loggers.blackbox import BlackboxMappedMetricLogger
+from acts.test_decorators import test_tracker_info
+from acts_contrib.test_utils.wifi import ota_sniffer
+from acts_contrib.test_utils.wifi import wifi_retail_ap as retail_ap
+from acts_contrib.test_utils.wifi import wifi_test_utils as wutils
+from acts_contrib.test_utils.wifi import wifi_performance_test_utils as wputils
+from functools import partial
+from WifiRvrTest import WifiRvrTest
+
+AccessPointTuple = collections.namedtuple(('AccessPointTuple'),
+                                          ['ap_settings'])
+
+
+class WifiTdlsRvrTest(WifiRvrTest):
+    def __init__(self, controllers):
+        base_test.BaseTestClass.__init__(self, controllers)
+        self.testcase_metric_logger = (
+            BlackboxMappedMetricLogger.for_test_case())
+        self.testclass_metric_logger = (
+            BlackboxMappedMetricLogger.for_test_class())
+        self.publish_testcase_metrics = True
+
+    def setup_class(self):
+        """Initializes common test hardware and parameters.
+
+        This function initializes hardware and compiles parameters that are
+        common to all tests in this class.
+        """
+        req_params = [
+            'tdls_rvr_test_params', 'testbed_params', 'RetailAccessPoints'
+        ]
+        opt_params = ['ap_networks', 'OTASniffer']
+        self.unpack_userparams(req_params, opt_params)
+        self.access_point = retail_ap.create(self.RetailAccessPoints)[0]
+        self.testclass_params = self.tdls_rvr_test_params
+        self.num_atten = self.attenuators[0].instrument.num_atten
+        self.iperf_server = ipf.create([{
+            'AndroidDevice':
+            self.android_devices[0].serial,
+            'port':
+            '5201'
+        }])[0]
+        self.iperf_client = ipc.create([{
+            'AndroidDevice':
+            self.android_devices[1].serial,
+            'port':
+            '5201'
+        }])[0]
+
+        self.log_path = os.path.join(logging.log_path, 'results')
+        if hasattr(self,
+                   'OTASniffer') and self.testbed_params['sniffer_enable']:
+            self.sniffer = ota_sniffer.create(self.OTASniffer)[0]
+        os.makedirs(self.log_path, exist_ok=True)
+        if not hasattr(self, 'golden_files_list'):
+            if 'golden_results_path' in self.testbed_params:
+                self.golden_files_list = [
+                    os.path.join(self.testbed_params['golden_results_path'],
+                                 file) for file in
+                    os.listdir(self.testbed_params['golden_results_path'])
+                ]
+            else:
+                self.log.warning('No golden files found.')
+                self.golden_files_list = []
+
+        self.testclass_results = []
+
+        # Turn WiFi ON
+        if self.testclass_params.get('airplane_mode', 1):
+            self.log.info('Turning on airplane mode.')
+            for ad in self.android_devices:
+                asserts.assert_true(utils.force_airplane_mode(ad, True),
+                                    "Can not turn on airplane mode.")
+        for ad in self.android_devices:
+            wutils.wifi_toggle_state(ad, True)
+
+    def teardown_class(self):
+        # Turn WiFi OFF
+        for dev in self.android_devices:
+            wutils.wifi_toggle_state(dev, False)
+        self.process_testclass_results()
+        # Teardown AP and release its lockfile
+        self.access_point.teardown()
+
+    def setup_test(self):
+        for ad in self.android_devices:
+            wputils.start_wifi_logging(ad)
+
+    def teardown_test(self):
+        self.iperf_server.stop()
+        for ad in self.android_devices:
+            wutils.reset_wifi(ad)
+            wputils.stop_wifi_logging(ad)
+
+    def on_exception(self, test_name, begin_time):
+        for ad in self.android_devices:
+            ad.take_bug_report(test_name, begin_time)
+            ad.cat_adb_log(test_name, begin_time)
+            wutils.get_ssrdumps(ad)
+
+    def compute_test_metrics(self, rvr_result):
+        #Set test metrics
+        rvr_result['metrics'] = {}
+        rvr_result['metrics']['peak_tput'] = max(
+            rvr_result['throughput_receive'])
+        if self.publish_testcase_metrics:
+            self.testcase_metric_logger.add_metric(
+                'peak_tput', rvr_result['metrics']['peak_tput'])
+
+        test_mode = rvr_result['testcase_params']['mode']
+        tput_below_limit = [
+            tput <
+            self.testclass_params['tput_metric_targets'][test_mode]['high']
+            for tput in rvr_result['throughput_receive']
+        ]
+        rvr_result['metrics']['high_tput_range'] = -1
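+        # Example (illustrative numbers): with a 400 Mbps 'high' target,
+        # throughput_receive = [450, 430, 380, 300, 120] and
+        # total_attenuation = [20, 25, 30, 35, 40], tput_below_limit is
+        # [False, False, True, True, True]; the loop breaks at idx=2, so
+        # high_tput_range = total_attenuation[1] = 25 dB, i.e. the last
+        # attenuation that still met the target. If the target is never met,
+        # the metric stays at -1.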
+        for idx in range(len(tput_below_limit)):
+            if all(tput_below_limit[idx:]):
+                if idx == 0:
+                    #Throughput was never above limit
+                    rvr_result['metrics']['high_tput_range'] = -1
+                else:
+                    rvr_result['metrics']['high_tput_range'] = rvr_result[
+                        'total_attenuation'][max(idx, 1) - 1]
+                break
+        if self.publish_testcase_metrics:
+            self.testcase_metric_logger.add_metric(
+                'high_tput_range', rvr_result['metrics']['high_tput_range'])
+
+        tput_below_limit = [
+            tput <
+            self.testclass_params['tput_metric_targets'][test_mode]['low']
+            for tput in rvr_result['throughput_receive']
+        ]
+        for idx in range(len(tput_below_limit)):
+            if all(tput_below_limit[idx:]):
+                rvr_result['metrics']['low_tput_range'] = rvr_result[
+                    'total_attenuation'][max(idx, 1) - 1]
+                break
+        else:
+            rvr_result['metrics']['low_tput_range'] = -1
+        if self.publish_testcase_metrics:
+            self.testcase_metric_logger.add_metric(
+                'low_tput_range', rvr_result['metrics']['low_tput_range'])
+
+    def setup_aps(self, testcase_params):
+        self.log.info('Setting AP to channel {} {}'.format(
+            testcase_params['channel'], testcase_params['bandwidth']))
+        self.access_point.set_channel(testcase_params['interface_id'],
+                                      testcase_params['channel'])
+        self.access_point.set_bandwidth(testcase_params['interface_id'],
+                                        testcase_params['bandwidth'])
+
+    def setup_duts(self, testcase_params):
+        # Check battery level before test
+        for ad in self.android_devices:
+            if not wputils.health_check(ad, 20):
+                asserts.skip('Overheating or Battery low. Skipping test.')
+            ad.go_to_sleep()
+            wutils.reset_wifi(ad)
+        # Connect both DUTs to the infrastructure network
+        for ad in self.android_devices:
+            wutils.wifi_connect(
+                ad,
+                self.ap_networks[0][testcase_params['interface_id']],
+                num_of_tries=5,
+                check_connectivity=True)
+
+    def setup_tdls_connection(self, testcase_params):
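+        """Collects peer info and enables TDLS between the two DUTs.
+
+        Queries both devices for IP/MAC addresses and TDLS support, requests
+        TDLS towards the second device's MAC address, and fills in the iperf
+        server address, mode, and test network parameters used by the RvR run.
+        """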
+
+        tdls_config = {}
+        for idx, ad in enumerate(self.android_devices):
+            tdls_config[idx] = {
+                'ip_address':
+                ad.droid.connectivityGetIPv4Addresses('wlan0')[0],
+                'mac_address': ad.droid.wifiGetConnectionInfo()['mac_address'],
+                'tdls_supported': ad.droid.wifiIsTdlsSupported(),
+                'off_channel_supported':
+                ad.droid.wifiIsOffChannelTdlsSupported()
+            }
+        self.android_devices[0].droid.wifiSetTdlsEnabledWithMacAddress(
+            tdls_config[1]['mac_address'], True)
+
+        testcase_params['iperf_server_address'] = tdls_config[0]['ip_address']
+        testcase_params['tdls_config'] = tdls_config
+        testcase_params['mode'] = testcase_params['bandwidth']
+        testcase_params['test_network'] = self.ap_networks[0][
+            testcase_params['interface_id']]
+
+    def setup_tdls_rvr_test(self, testcase_params):
+        # Setup the aps
+        self.setup_aps(testcase_params)
+        # Setup the duts
+        self.setup_duts(testcase_params)
+        # Set attenuator to 0 dB
+        for attenuator in self.attenuators:
+            attenuator.set_atten(0, strict=False)
+        # Setup the TDLS connection
+        self.setup_tdls_connection(testcase_params)
+        # Set DUT to monitor RSSI and LLStats on
+        self.monitored_dut = self.android_devices[1]
+
+    def compile_test_params(self, testcase_params):
+        """Function that completes all test params based on the test name.
+
+        Args:
+            testcase_params: dict containing test-specific parameters
+        """
+        for ad in self.android_devices:
+            wputils.check_skip_conditions(testcase_params, ad,
+                                          self.access_point)
+
+        # Compile RvR parameters
+        num_atten_steps = int((self.testclass_params['atten_stop'] -
+                               self.testclass_params['atten_start']) /
+                              self.testclass_params['atten_step'])
+        testcase_params['atten_range'] = [
+            self.testclass_params['atten_start'] +
+            x * self.testclass_params['atten_step']
+            for x in range(0, num_atten_steps)
+        ]
+
+        # Compile iperf arguments
+        if testcase_params['traffic_type'] == 'TCP':
+            testcase_params['iperf_socket_size'] = self.testclass_params.get(
+                'tcp_socket_size', None)
+            testcase_params['iperf_processes'] = self.testclass_params.get(
+                'tcp_processes', 1)
+        elif testcase_params['traffic_type'] == 'UDP':
+            testcase_params['iperf_socket_size'] = self.testclass_params.get(
+                'udp_socket_size', None)
+            testcase_params['iperf_processes'] = self.testclass_params.get(
+                'udp_processes', 1)
+        testcase_params['iperf_args'] = wputils.get_iperf_arg_string(
+            duration=self.testclass_params['iperf_duration'],
+            reverse_direction=(testcase_params['traffic_direction'] == 'DL'),
+            socket_size=testcase_params['iperf_socket_size'],
+            num_processes=testcase_params['iperf_processes'],
+            traffic_type=testcase_params['traffic_type'],
+            ipv6=False)
+        testcase_params['use_client_output'] = (
+            testcase_params['traffic_direction'] == 'DL')
+
+        # Compile AP and infrastructure connection parameters
+        testcase_params['interface_id'] = '2G' if testcase_params[
+            'channel'] < 13 else '5G_1'
+        return testcase_params
+
+    def _test_tdls_rvr(self, testcase_params):
+        """ Function that gets called for each test case
+
+        Args:
+            testcase_params: dict containing test-specific parameters
+        """
+        # Compile test parameters from config and test name
+        testcase_params = self.compile_test_params(testcase_params)
+
+        # Prepare devices and run test
+        self.setup_tdls_rvr_test(testcase_params)
+        rvr_result = self.run_rvr_test(testcase_params)
+
+        # Post-process results
+        self.testclass_results.append(rvr_result)
+        self.process_test_results(rvr_result)
+        self.pass_fail_check(rvr_result)
+
+    def generate_test_cases(self, ap_config_list, traffic_type,
+                            traffic_directions):
+        """Function that auto-generates test cases for a test class."""
+        test_cases = []
+
+        for ap_config, traffic_direction in itertools.product(
+                ap_config_list, traffic_directions):
+            test_name = 'test_tdls_rvr_{}_{}_ch{}_{}'.format(
+                traffic_type, traffic_direction, ap_config[0], ap_config[1])
+            test_params = collections.OrderedDict(
+                traffic_type=traffic_type,
+                traffic_direction=traffic_direction,
+                channel=ap_config[0],
+                bandwidth=ap_config[1])
+            test_class = self.__class__.__name__
+            if "uuid_list" in self.user_params:
+                test_tracker_uuid = self.user_params["uuid_list"][
+                    test_class][test_name]
+                # Bind test_params as a default argument so each generated
+                # test runs its own parameters instead of the loop's final
+                # value (a plain lambda would late-bind the loop variable).
+                test_case = test_tracker_info(uuid=test_tracker_uuid)(
+                    lambda params=test_params: self._test_tdls_rvr(params))
+            else:
+                test_case = partial(self._test_tdls_rvr, test_params)
+            setattr(self, test_name, test_case)
+            test_cases.append(test_name)
+        return test_cases
+
+
+class WifiTdlsRvr_FCC_TCP_Test(WifiTdlsRvrTest):
+    def __init__(self, controllers):
+        super().__init__(controllers)
+        ap_config_list = [[6, 'bw20'], [36, 'bw20'], [36, 'bw40'],
+                          [36, 'bw80'], [149, 'bw20'], [149, 'bw40'],
+                          [149, 'bw80']]
+        self.country_code = 'US'
+        self.tests = self.generate_test_cases(ap_config_list=ap_config_list,
+                                              traffic_type='TCP',
+                                              traffic_directions=['DL', 'UL'])
+
+
+class WifiTdlsRvr_FCC_UDP_Test(WifiTdlsRvrTest):
+    def __init__(self, controllers):
+        super().__init__(controllers)
+        ap_config_list = [[6, 'bw20'], [36, 'bw20'], [36, 'bw40'],
+                          [36, 'bw80'], [149, 'bw20'], [149, 'bw40'],
+                          [149, 'bw80']]
+        self.country_code = 'US'
+        self.tests = self.generate_test_cases(ap_config_list=ap_config_list,
+                                              traffic_type='UDP',
+                                              traffic_directions=['DL', 'UL'])
+
+
+class WifiTdlsRvr_ETSI_TCP_Test(WifiTdlsRvrTest):
+    def __init__(self, controllers):
+        super().__init__(controllers)
+        ap_config_list = [[6, 'bw20'], [36, 'bw20'], [36, 'bw40'],
+                          [36, 'bw80'], [149, 'bw20'], [149, 'bw40'],
+                          [149, 'bw80']]
+        self.country_code = 'GB'
+        self.tests = self.generate_test_cases(ap_config_list=ap_config_list,
+                                              traffic_type='TCP',
+                                              traffic_directions=['DL', 'UL'])
+
+
+class WifiTdlsRvr_ETSI_UDP_Test(WifiTdlsRvrTest):
+    def __init__(self, controllers):
+        super().__init__(controllers)
+        ap_config_list = [[6, 'bw20'], [36, 'bw20'], [36, 'bw40'],
+                          [36, 'bw80'], [149, 'bw20'], [149, 'bw40'],
+                          [149, 'bw80']]
+        self.country_code = 'GB'
+        self.tests = self.generate_test_cases(ap_config_list=ap_config_list,
+                                              traffic_type='UDP',
+                                              traffic_directions=['DL', 'UL'])
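
As in the other RvR classes in this patch, compile_test_params above builds the attenuation sweep from atten_start, atten_stop, and atten_step. A quick worked example with illustrative values (not taken from a real config):

atten_start, atten_stop, atten_step = 0, 60, 5
num_atten_steps = int((atten_stop - atten_start) / atten_step)    # 12
atten_range = [atten_start + x * atten_step for x in range(num_atten_steps)]
print(atten_range)   # [0, 5, 10, ..., 55]; atten_stop itself is excluded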
diff --git a/acts_tests/tests/google/wifi/WifiTeleCoexTest.py b/acts_tests/tests/google/wifi/WifiTeleCoexTest.py
index 02e9541..3031627 100644
--- a/acts_tests/tests/google/wifi/WifiTeleCoexTest.py
+++ b/acts_tests/tests/google/wifi/WifiTeleCoexTest.py
@@ -17,6 +17,7 @@
 from acts_contrib.test_utils.tel.tel_ims_utils import set_wfc_mode
 from acts_contrib.test_utils.tel.tel_phone_setup_utils import phone_setup_voice_general
 from acts_contrib.test_utils.tel.tel_phone_setup_utils import ensure_phones_idle
+from acts_contrib.test_utils.tel.tel_phone_setup_utils import ensure_network_generation
 from acts_contrib.test_utils.tel.tel_voice_utils import two_phone_call_short_seq
 from acts_contrib.test_utils.tel.tel_voice_utils import is_phone_in_call_iwlan
 from acts_contrib.test_utils.tel.tel_phone_setup_utils import phone_idle_iwlan
@@ -212,7 +213,7 @@
         """
         tele_utils.toggle_airplane_mode(self.log, self.android_devices[0], False)
         toggle_volte(self.log, self.android_devices[0], volte_mode)
-        if not tele_utils.ensure_network_generation(
+        if not ensure_network_generation(
                 self.log,
                 self.android_devices[0],
                 GEN_4G,
diff --git a/acts_tests/tests/google/wifi/WifiThroughputStabilityTest.py b/acts_tests/tests/google/wifi/WifiThroughputStabilityTest.py
index 59c8575..a8d9628 100644
--- a/acts_tests/tests/google/wifi/WifiThroughputStabilityTest.py
+++ b/acts_tests/tests/google/wifi/WifiThroughputStabilityTest.py
@@ -29,7 +29,9 @@
 from acts.controllers.utils_lib import ssh
 from acts.metrics.loggers.blackbox import BlackboxMappedMetricLogger
 from acts_contrib.test_utils.wifi import ota_chamber
+from acts_contrib.test_utils.wifi import ota_sniffer
 from acts_contrib.test_utils.wifi import wifi_performance_test_utils as wputils
+from acts_contrib.test_utils.wifi.wifi_performance_test_utils.bokeh_figure import BokehFigure
 from acts_contrib.test_utils.wifi import wifi_retail_ap as retail_ap
 from acts_contrib.test_utils.wifi import wifi_test_utils as wutils
 from functools import partial
@@ -57,10 +59,9 @@
             BlackboxMappedMetricLogger.for_test_class())
         self.publish_testcase_metrics = True
         # Generate test cases
-        self.tests = self.generate_test_cases([6, 36, 149],
-                                              ['bw20', 'bw40', 'bw80'],
-                                              ['TCP', 'UDP'], ['DL', 'UL'],
-                                              ['high', 'low'])
+        self.tests = self.generate_test_cases(
+            [6, 36, 149, '6g37'], ['bw20', 'bw40', 'bw80', 'bw160'],
+            ['TCP', 'UDP'], ['DL', 'UL'], ['high', 'low'])
 
     def generate_test_cases(self, channels, modes, traffic_types,
                             traffic_directions, signal_levels):
@@ -68,11 +69,11 @@
         allowed_configs = {
             20: [
                 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 36, 40, 44, 48, 64, 100,
-                116, 132, 140, 149, 153, 157, 161
+                116, 132, 140, 149, 153, 157, 161, '6g37', '6g117', '6g213'
             ],
-            40: [36, 44, 100, 149, 157],
-            80: [36, 100, 149],
-            160: [36]
+            40: [36, 44, 100, 149, 157, '6g37', '6g117', '6g213'],
+            80: [36, 100, 149, '6g37', '6g117', '6g213'],
+            160: [36, '6g37', '6g117', '6g213']
         }
 
         test_cases = []
@@ -108,7 +109,8 @@
             'throughput_stability_test_params', 'testbed_params',
             'main_network', 'RetailAccessPoints', 'RemoteServer'
         ]
-        self.unpack_userparams(req_params)
+        opt_params = ['OTASniffer']
+        self.unpack_userparams(req_params, opt_params)
         self.testclass_params = self.throughput_stability_test_params
         self.num_atten = self.attenuators[0].instrument.num_atten
         self.remote_server = ssh.connection.SshConnection(
@@ -116,6 +118,13 @@
         self.iperf_server = self.iperf_servers[0]
         self.iperf_client = self.iperf_clients[0]
         self.access_point = retail_ap.create(self.RetailAccessPoints)[0]
+        if hasattr(self,
+                   'OTASniffer') and self.testbed_params['sniffer_enable']:
+            try:
+                self.sniffer = ota_sniffer.create(self.OTASniffer)[0]
+            except:
+                self.log.warning('Could not start sniffer. Disabling sniffs.')
+                self.testbed_params['sniffer_enable'] = 0
         self.log_path = os.path.join(logging.log_path, 'test_results')
         os.makedirs(self.log_path, exist_ok=True)
         self.log.info('Access Point Configuration: {}'.format(
@@ -133,7 +142,14 @@
     def teardown_test(self):
         self.iperf_server.stop()
 
-    def pass_fail_check(self, test_result_dict):
+    def teardown_class(self):
+        self.access_point.teardown()
+        # Turn WiFi OFF
+        for dev in self.android_devices:
+            wutils.wifi_toggle_state(dev, False)
+            dev.go_to_sleep()
+
+    def pass_fail_check(self, test_result):
         """Check the test result and decide if it passed or failed.
 
         Checks the throughput stability test's PASS/FAIL criteria based on
@@ -143,11 +159,11 @@
             test_result_dict: dict containing attenuation, throughput and other
             meta data
         """
-        avg_throughput = test_result_dict['iperf_results']['avg_throughput']
-        min_throughput = test_result_dict['iperf_results']['min_throughput']
+        avg_throughput = test_result['iperf_summary']['avg_throughput']
+        min_throughput = test_result['iperf_summary']['min_throughput']
         std_dev_percent = (
-            test_result_dict['iperf_results']['std_deviation'] /
-            test_result_dict['iperf_results']['avg_throughput']) * 100
+            test_result['iperf_summary']['std_deviation'] /
+            test_result['iperf_summary']['avg_throughput']) * 100
         # Set blackbox metrics
         if self.publish_testcase_metrics:
             self.testcase_metric_logger.add_metric('avg_throughput',
@@ -163,13 +179,21 @@
         std_deviation_check = std_dev_percent < self.testclass_params[
             'std_deviation_threshold']
 
+        llstats = (
+            'TX MCS = {0} ({1:.1f}%). '
+            'RX MCS = {2} ({3:.1f}%)'.format(
+                test_result['llstats']['summary']['common_tx_mcs'],
+                test_result['llstats']['summary']['common_tx_mcs_freq'] * 100,
+                test_result['llstats']['summary']['common_rx_mcs'],
+                test_result['llstats']['summary']['common_rx_mcs_freq'] * 100))
+
         test_message = (
             'Atten: {0:.2f}dB, RSSI: {1:.2f}dB. '
             'Throughput (Mean: {2:.2f}, Std. Dev:{3:.2f}%, Min: {4:.2f} Mbps).'
-            'LLStats : {5}'.format(test_result_dict['attenuation'],
-                                   test_result_dict['rssi'], avg_throughput,
-                                   std_dev_percent, min_throughput,
-                                   test_result_dict['llstats']))
+            'LLStats : {5}'.format(
+                test_result['attenuation'],
+                test_result['rssi_result']['signal_poll_rssi']['mean'],
+                avg_throughput, std_dev_percent, min_throughput, llstats))
         if min_throughput_check and std_deviation_check:
             asserts.explicit_pass('Test Passed.' + test_message)
         asserts.fail('Test Failed. ' + test_message)
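
For concreteness, the stability criterion computed above can be reproduced with a few lines of plain Python. The sample rates and the 10% threshold below are purely illustrative; the real thresholds come from testclass_params, and the minimum-throughput check uses its own separate threshold.

import numpy

rates_mbps = [430.0, 455.0, 442.0, 438.0, 449.0]   # instantaneous rates
avg_throughput = numpy.mean(rates_mbps)             # ~442.8 Mbps
min_throughput = min(rates_mbps)                    # 430.0 Mbps
std_dev_percent = numpy.std(rates_mbps) / avg_throughput * 100   # ~2 %

# With an assumed std_deviation_threshold of 10 (%), this run passes the
# stability criterion.
std_deviation_check = std_dev_percent < 10
print(std_deviation_check)   # True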
@@ -188,18 +212,6 @@
         test_name = self.current_test_name
         results_file_path = os.path.join(self.log_path,
                                          '{}.txt'.format(test_name))
-        test_result_dict = {}
-        test_result_dict['ap_settings'] = test_result['ap_settings'].copy()
-        test_result_dict['attenuation'] = test_result['attenuation']
-        test_result_dict['rssi'] = test_result['rssi_result'][
-            'signal_poll_rssi']['mean']
-        test_result_dict['llstats'] = (
-            'TX MCS = {0} ({1:.1f}%). '
-            'RX MCS = {2} ({3:.1f}%)'.format(
-                test_result['llstats']['summary']['common_tx_mcs'],
-                test_result['llstats']['summary']['common_tx_mcs_freq'] * 100,
-                test_result['llstats']['summary']['common_rx_mcs'],
-                test_result['llstats']['summary']['common_rx_mcs_freq'] * 100))
         if test_result['iperf_result'].instantaneous_rates:
             instantaneous_rates_Mbps = [
                 rate * 8 * (1.024**2)
@@ -210,20 +222,20 @@
                 'iperf_result'].get_std_deviation(
                     self.testclass_params['iperf_ignored_interval']) * 8
         else:
-            instantaneous_rates_Mbps = float('nan')
+            instantaneous_rates_Mbps = [float('nan')]
             tput_standard_deviation = float('nan')
-        test_result_dict['iperf_results'] = {
+        test_result['iperf_summary'] = {
             'instantaneous_rates': instantaneous_rates_Mbps,
             'avg_throughput': numpy.mean(instantaneous_rates_Mbps),
             'std_deviation': tput_standard_deviation,
             'min_throughput': min(instantaneous_rates_Mbps)
         }
         with open(results_file_path, 'w') as results_file:
-            json.dump(test_result_dict, results_file)
+            json.dump(wputils.serialize_dict(test_result), results_file)
         # Plot and save
-        figure = wputils.BokehFigure(test_name,
-                                     x_label='Time (s)',
-                                     primary_y_label='Throughput (Mbps)')
+        figure = BokehFigure(test_name,
+                             x_label='Time (s)',
+                             primary_y_label='Throughput (Mbps)')
         time_data = list(range(0, len(instantaneous_rates_Mbps)))
         figure.add_line(time_data,
                         instantaneous_rates_Mbps,
@@ -232,7 +244,7 @@
         output_file_path = os.path.join(self.log_path,
                                         '{}.html'.format(test_name))
         figure.generate_figure(output_file_path)
-        return test_result_dict
+        return test_result
 
     def setup_ap(self, testcase_params):
         """Sets up the access point in the configuration required by the test.
@@ -242,12 +254,16 @@
         """
         band = self.access_point.band_lookup_by_channel(
             testcase_params['channel'])
-        if '2G' in band:
-            frequency = wutils.WifiEnums.channel_2G_to_freq[
-                testcase_params['channel']]
+        if '6G' in band:
+            frequency = wutils.WifiEnums.channel_6G_to_freq[int(
+                testcase_params['channel'].strip('6g'))]
         else:
-            frequency = wutils.WifiEnums.channel_5G_to_freq[
-                testcase_params['channel']]
+            if testcase_params['channel'] < 13:
+                frequency = wutils.WifiEnums.channel_2G_to_freq[
+                    testcase_params['channel']]
+            else:
+                frequency = wutils.WifiEnums.channel_5G_to_freq[
+                    testcase_params['channel']]
         if frequency in wutils.WifiEnums.DFS_5G_FREQUENCIES:
             self.access_point.set_region(self.testbed_params['DFS_region'])
         else:
@@ -263,11 +279,13 @@
         Args:
             testcase_params: dict containing AP and other test params
         """
-        # Check battery level before test
-        if not wputils.health_check(self.dut, 10):
-            asserts.skip('Battery level too low. Skipping test.')
         # Turn screen off to preserve battery
-        self.dut.go_to_sleep()
+        if self.testbed_params.get('screen_on',
+                                   False) or self.testclass_params.get(
+                                       'screen_on', False):
+            self.dut.droid.wakeLockAcquireDim()
+        else:
+            self.dut.go_to_sleep()
         band = self.access_point.band_lookup_by_channel(
             testcase_params['channel'])
         if wputils.validate_network(self.dut,
@@ -276,13 +294,15 @@
         else:
             wutils.wifi_toggle_state(self.dut, True)
             wutils.reset_wifi(self.dut)
+            if self.testbed_params.get('txbf_off', False):
+                wputils.disable_beamforming(self.dut)
             wutils.set_wifi_country_code(self.dut,
                                          self.testclass_params['country_code'])
             self.main_network[band]['channel'] = testcase_params['channel']
             wutils.wifi_connect(self.dut,
                                 testcase_params['test_network'],
                                 num_of_tries=5,
-                                check_connectivity=False)
+                                check_connectivity=True)
         self.dut_ip = self.dut.droid.connectivityGetIPv4Addresses('wlan0')[0]
 
     def setup_throughput_stability_test(self, testcase_params):
@@ -293,6 +313,9 @@
         """
         # Configure AP
         self.setup_ap(testcase_params)
+        # Set attenuator to 0 dB
+        for attenuator in self.attenuators:
+            attenuator.set_atten(0, strict=False, retry=True)
         # Reset, configure, and connect DUT
         self.setup_dut(testcase_params)
         # Wait before running the first wifi test
@@ -331,6 +354,12 @@
         self.log.info('Starting iperf test.')
         llstats_obj = wputils.LinkLayerStats(self.dut)
         llstats_obj.update_stats()
+        if self.testbed_params['sniffer_enable']:
+            self.sniffer.start_capture(
+                network=testcase_params['test_network'],
+                chan=testcase_params['channel'],
+                bw=testcase_params['bandwidth'],
+                duration=self.testclass_params['iperf_duration'] / 5)
         self.iperf_server.start(tag=str(testcase_params['atten_level']))
         current_rssi = wputils.get_connected_rssi_nb(
             dut=self.dut,
@@ -345,6 +374,9 @@
             self.testclass_params['iperf_duration'] + TEST_TIMEOUT)
         current_rssi = current_rssi.result()
         server_output_path = self.iperf_server.stop()
+        # Stop sniffer
+        if self.testbed_params['sniffer_enable']:
+            self.sniffer.stop_capture()
         # Set attenuator to 0 dB
         for attenuator in self.attenuators:
             attenuator.set_atten(0)
@@ -389,9 +421,9 @@
 
         # Get attenuation for target RSSI
         if testcase_params['signal_level'] == 'low':
-            target_rssi = self.testclass_params['low_throughput_target']
+            target_rssi = self.testclass_params['low_throughput_rssi_target']
         else:
-            target_rssi = self.testclass_params['high_throughput_target']
+            target_rssi = self.testclass_params['high_throughput_rssi_target']
         target_atten = wputils.get_atten_for_target_rssi(
             target_rssi, self.attenuators, self.dut, self.remote_server)
 
@@ -400,6 +432,11 @@
 
     def compile_test_params(self, testcase_params):
         """Function that completes setting the test case parameters."""
+        # Check if test should be skipped based on parameters.
+        wputils.check_skip_conditions(testcase_params, self.dut,
+                                      self.access_point,
+                                      getattr(self, 'ota_chamber', None))
+
         band = self.access_point.band_lookup_by_channel(
             testcase_params['channel'])
         testcase_params['test_network'] = self.main_network[band]
@@ -423,7 +460,9 @@
                 reverse_direction=1,
                 traffic_type=testcase_params['traffic_type'],
                 socket_size=testcase_params['iperf_socket_size'],
-                num_processes=testcase_params['iperf_processes'])
+                num_processes=testcase_params['iperf_processes'],
+                udp_throughput=self.testclass_params['UDP_rates'][
+                    testcase_params['mode']])
             testcase_params['use_client_output'] = True
         else:
             testcase_params['iperf_args'] = wputils.get_iperf_arg_string(
@@ -431,7 +470,9 @@
                 reverse_direction=0,
                 traffic_type=testcase_params['traffic_type'],
                 socket_size=testcase_params['iperf_socket_size'],
-                num_processes=testcase_params['iperf_processes'])
+                num_processes=testcase_params['iperf_processes'],
+                udp_throughput=self.testclass_params['UDP_rates'][
+                    testcase_params['mode']])
             testcase_params['use_client_output'] = False
 
         return testcase_params
@@ -475,6 +516,7 @@
             self.user_params['OTAChamber'])[0]
 
     def teardown_class(self):
+        WifiThroughputStabilityTest.teardown_class(self)
         self.ota_chamber.reset_chamber()
         self.process_testclass_results()
 
@@ -496,12 +538,9 @@
                 ]).items())
             test_data = channel_data.setdefault(
                 test_id, collections.OrderedDict(position=[], throughput=[]))
-            current_throughput = (numpy.mean(
-                test['iperf_result'].instantaneous_rates[
-                    self.testclass_params['iperf_ignored_interval']:-1])
-                                  ) * 8 * (1.024**2)
             test_data['position'].append(current_params['position'])
-            test_data['throughput'].append(current_throughput)
+            test_data['throughput'].append(
+                test['iperf_summary']['avg_throughput'])
 
         chamber_mode = self.testclass_results[0]['testcase_params'][
             'chamber_mode']
@@ -519,7 +558,7 @@
                     test_id_dict['traffic_direction'], channel,
                     test_id_dict['mode'])
                 metric_name = metric_tag + '.avg_throughput'
-                metric_value = numpy.mean(test_data['throughput'])
+                metric_value = numpy.nanmean(test_data['throughput'])
                 self.testclass_metric_logger.add_metric(
                     metric_name, metric_value)
                 metric_name = metric_tag + '.min_throughput'
@@ -530,7 +569,7 @@
         # Plot test class results
         plots = []
         for channel, channel_data in testclass_data.items():
-            current_plot = wputils.BokehFigure(
+            current_plot = BokehFigure(
                 title='Channel {} - Rate vs. Position'.format(channel),
                 x_label=x_label,
                 primary_y_label='Rate (Mbps)',
@@ -547,7 +586,7 @@
             plots.append(current_plot)
         current_context = context.get_current_context().get_full_output_path()
         plot_file_path = os.path.join(current_context, 'results.html')
-        wputils.BokehFigure.save_figures(plots, plot_file_path)
+        BokehFigure.save_figures(plots, plot_file_path)
 
     def setup_throughput_stability_test(self, testcase_params):
         WifiThroughputStabilityTest.setup_throughput_stability_test(
@@ -559,10 +598,11 @@
             self.ota_chamber.step_stirrers(testcase_params['total_positions'])
 
     def get_target_atten(self, testcase_params):
+        band = wputils.CHANNEL_TO_BAND_MAP[testcase_params['channel']]
         if testcase_params['signal_level'] == 'high':
-            test_atten = self.testclass_params['default_atten_levels'][0]
+            test_atten = self.testclass_params['ota_atten_levels'][band][0]
         elif testcase_params['signal_level'] == 'low':
-            test_atten = self.testclass_params['default_atten_levels'][1]
+            test_atten = self.testclass_params['ota_atten_levels'][band][1]
         return test_atten
 
     def generate_test_cases(self, channels, modes, traffic_types,
@@ -571,16 +611,16 @@
         allowed_configs = {
             20: [
                 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 36, 40, 44, 48, 64, 100,
-                116, 132, 140, 149, 153, 157, 161
+                116, 132, 140, 149, 153, 157, 161, '6g37', '6g117', '6g213'
             ],
-            40: [36, 44, 100, 149, 157],
-            80: [36, 100, 149],
-            160: [36]
+            40: [36, 44, 100, 149, 157, '6g37', '6g117', '6g213'],
+            80: [36, 100, 149, '6g37', '6g117', '6g213'],
+            160: [36, '6g37', '6g117', '6g213']
         }
 
         test_cases = []
-        for channel, mode, position, traffic_type, signal_level, traffic_direction in itertools.product(
-                channels, modes, positions, traffic_types, signal_levels,
+        for channel, mode, signal_level, position, traffic_type, traffic_direction in itertools.product(
+                channels, modes, signal_levels, positions, traffic_types,
                 traffic_directions):
             bandwidth = int(''.join([x for x in mode if x.isdigit()]))
             if channel not in allowed_configs[bandwidth]:
@@ -588,6 +628,7 @@
             testcase_params = collections.OrderedDict(
                 channel=channel,
                 mode=mode,
+                bandwidth=bandwidth,
                 traffic_type=traffic_type,
                 traffic_direction=traffic_direction,
                 signal_level=signal_level,
@@ -608,7 +649,8 @@
                                                 ):
     def __init__(self, controllers):
         WifiOtaThroughputStabilityTest.__init__(self, controllers)
-        self.tests = self.generate_test_cases([6, 36, 149], ['bw20', 'bw80'],
+        self.tests = self.generate_test_cases([6, 36, 149, '6g37'],
+                                              ['bw20', 'bw80', 'bw160'],
                                               ['TCP'], ['DL', 'UL'],
                                               ['high', 'low'], 'orientation',
                                               list(range(0, 360, 10)))
@@ -617,7 +659,8 @@
 class WifiOtaThroughputStability_45Degree_Test(WifiOtaThroughputStabilityTest):
     def __init__(self, controllers):
         WifiOtaThroughputStabilityTest.__init__(self, controllers)
-        self.tests = self.generate_test_cases([6, 36, 149], ['bw20', 'bw80'],
+        self.tests = self.generate_test_cases([6, 36, 149, '6g37'],
+                                              ['bw20', 'bw80', 'bw160'],
                                               ['TCP'], ['DL', 'UL'],
                                               ['high', 'low'], 'orientation',
                                               list(range(0, 360, 45)))
@@ -627,8 +670,9 @@
         WifiOtaThroughputStabilityTest):
     def __init__(self, controllers):
         WifiOtaThroughputStabilityTest.__init__(self, controllers)
-        self.tests = self.generate_test_cases([6, 36, 149], ['bw20', 'bw80'],
+        self.tests = self.generate_test_cases([6, 36, 149, '6g37'],
+                                              ['bw20', 'bw80', 'bw160'],
                                               ['TCP'], ['DL', 'UL'],
                                               ['high', 'low'],
                                               'stepped stirrers',
-                                              list(range(100)))
\ No newline at end of file
+                                              list(range(100)))
diff --git a/acts_tests/tests/google/wifi/WifiTxPowerCheckTest.py b/acts_tests/tests/google/wifi/WifiTxPowerCheckTest.py
new file mode 100644
index 0000000..706903c
--- /dev/null
+++ b/acts_tests/tests/google/wifi/WifiTxPowerCheckTest.py
@@ -0,0 +1,927 @@
+#!/usr/bin/env python3.4
+#
+#   Copyright 2017 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the 'License');
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an 'AS IS' BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+import collections
+import csv
+import itertools
+import json
+import logging
+import math
+import os
+import re
+import scipy.stats
+import time
+from acts import asserts
+from acts import context
+from acts import base_test
+from acts import utils
+from acts.controllers.utils_lib import ssh
+from acts.metrics.loggers.blackbox import BlackboxMappedMetricLogger
+from acts_contrib.test_utils.wifi import ota_sniffer
+from acts_contrib.test_utils.wifi import wifi_performance_test_utils as wputils
+from acts_contrib.test_utils.wifi import wifi_retail_ap as retail_ap
+from acts_contrib.test_utils.wifi import wifi_test_utils as wutils
+from functools import partial
+
+
+class WifiTxPowerCheckTest(base_test.BaseTestClass):
+    """Class for ping-based Wifi performance tests.
+
+    This class implements WiFi ping performance tests such as range and RTT.
+    The class setups up the AP in the desired configurations, configures
+    and connects the phone to the AP, and runs  For an example config file to
+    run this test class see example_connectivity_performance_ap_sta.json.
+    """
+
+    TEST_TIMEOUT = 10
+    RSSI_POLL_INTERVAL = 0.2
+    SHORT_SLEEP = 1
+    MED_SLEEP = 5
+    MAX_CONSECUTIVE_ZEROS = 5
+    DISCONNECTED_PING_RESULT = {
+        'connected': 0,
+        'rtt': [],
+        'time_stamp': [],
+        'ping_interarrivals': [],
+        'packet_loss_percentage': 100
+    }
+
+    BRCM_SAR_MAPPING = {
+        0: 'disable',
+        1: 'head',
+        2: 'grip',
+        16: 'bt',
+        32: 'hotspot'
+    }
+
+    BAND_TO_CHANNEL_MAP = {
+        ('2g', 1): [1, 6, 11],
+        ('5g', 1): [36, 40, 44, 48],
+        ('5g', 2): [52, 56, 60, 64],
+        ('5g', 3): range(100, 148, 4),
+        ('5g', 4): [149, 153, 157, 161],
+        ('6g', 1): ['6g{}'.format(channel) for channel in range(1, 46, 4)],
+        ('6g', 2): ['6g{}'.format(channel) for channel in range(49, 94, 4)],
+        ('6g', 3): ['6g{}'.format(channel) for channel in range(97, 114, 4)],
+        ('6g', 4): ['6g{}'.format(channel) for channel in range(117, 158, 4)],
+        ('6g', 5): ['6g{}'.format(channel) for channel in range(161, 186, 4)],
+        ('6g', 6): ['6g{}'.format(channel) for channel in range(189, 234, 4)]
+    }
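+    # Example: channel 40 falls in sub-band ('5g', 1) and channel '6g37' in
+    # sub-band ('6g', 1); the second tuple element is the sub-band index used
+    # when looking up SAR powers.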
+
+    def __init__(self, controllers):
+        base_test.BaseTestClass.__init__(self, controllers)
+        self.testcase_metric_logger = (
+            BlackboxMappedMetricLogger.for_test_case())
+        self.testclass_metric_logger = (
+            BlackboxMappedMetricLogger.for_test_class())
+        self.publish_testcase_metrics = True
+        self.tests = self.generate_test_cases(
+            ap_power='standard',
+            channels=[6, 36, 52, 100, 149, '6g37', '6g117', '6g213'],
+            modes=['bw20', 'bw40', 'bw80', 'bw160'],
+            test_types=[
+                'test_tx_power',
+            ],
+            country_codes=['US', 'GB', 'JP'],
+            sar_states=range(0, 13))
+
+    def setup_class(self):
+        self.dut = self.android_devices[-1]
+        req_params = [
+            'tx_power_test_params', 'testbed_params', 'main_network',
+            'RetailAccessPoints', 'RemoteServer'
+        ]
+        opt_params = ['OTASniffer']
+        self.unpack_userparams(req_params, opt_params)
+        self.testclass_params = self.tx_power_test_params
+        self.num_atten = self.attenuators[0].instrument.num_atten
+        self.ping_server = ssh.connection.SshConnection(
+            ssh.settings.from_config(self.RemoteServer[0]['ssh_config']))
+        self.access_point = retail_ap.create(self.RetailAccessPoints)[0]
+        if hasattr(self,
+                   'OTASniffer') and self.testbed_params['sniffer_enable']:
+            try:
+                self.sniffer = ota_sniffer.create(self.OTASniffer)[0]
+            except:
+                self.log.warning('Could not start sniffer. Disabling sniffs.')
+                self.testbed_params['sniffer_enable'] = 0
+        self.log.info('Access Point Configuration: {}'.format(
+            self.access_point.ap_settings))
+        self.log_path = os.path.join(logging.log_path, 'results')
+        os.makedirs(self.log_path, exist_ok=True)
+        self.atten_dut_chain_map = {}
+        self.testclass_results = []
+
+        # Turn WiFi ON
+        if self.testclass_params.get('airplane_mode', 1):
+            self.log.info('Turning on airplane mode.')
+            asserts.assert_true(utils.force_airplane_mode(self.dut, True),
+                                'Can not turn on airplane mode.')
+        wutils.wifi_toggle_state(self.dut, True)
+        self.dut.droid.wifiEnableVerboseLogging(1)
+        asserts.assert_equal(self.dut.droid.wifiGetVerboseLoggingLevel(), 1,
+                             "Failed to enable WiFi verbose logging.")
+
+        # decode nvram
+        self.nvram_sar_data = self.read_nvram_sar_data()
+        self.csv_sar_data = self.read_sar_csv(self.testclass_params['sar_csv'])
+
+    def teardown_class(self):
+        # Turn WiFi OFF and reset AP
+        self.access_point.teardown()
+        for dev in self.android_devices:
+            wutils.wifi_toggle_state(dev, False)
+            dev.go_to_sleep()
+        self.process_testclass_results()
+
+    def setup_test(self):
+        self.retry_flag = False
+
+    def teardown_test(self):
+        self.retry_flag = False
+
+    def on_retry(self):
+        """Function to control test logic on retried tests.
+
+        This function is automatically executed on tests that are being
+        retried. In this case the function resets wifi, toggles it off and on
+        and sets a retry_flag to enable further tweaking the test logic on
+        second attempts.
+        """
+        self.retry_flag = True
+        for dev in self.android_devices:
+            wutils.reset_wifi(dev)
+            wutils.toggle_wifi_off_and_on(dev)
+
+    def read_sar_csv(self, sar_csv):
+        """Reads SAR powers from CSV.
+
+        This function reads SAR powers from a CSV and generates a dictionary
+        with all programmed TX powers on a per band and regulatory domain
+        basis.
+
+        Args:
+            sar_csv: path to SAR data file.
+        Returns:
+            sar_powers: dict containing all SAR data
+        """
+
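+        # Illustrative shape of the returned structure (column names as parsed
+        # below; actual CSV contents may vary):
+        #   sar_powers[scenario_index]['SAR Powers'][(domain, mode, band)][chain]
+        #       = [sub-band powers]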
+        sar_powers = {}
+        sar_csv_data = []
+        with open(sar_csv, mode='r') as f:
+            reader = csv.DictReader(f)
+            for row in reader:
+                row['Sub-band Powers'] = [
+                    float(val) for key, val in row.items()
+                    if 'Sub-band' in key and val != ''
+                ]
+                sar_csv_data.append(row)
+
+        for row in sar_csv_data:
+            sar_powers.setdefault(int(row['Scenario Index']), {})
+            sar_powers[int(row['Scenario Index'])].setdefault('SAR Powers', {})
+            sar_row_key = (row['Regulatory Domain'], row['Mode'], row['Band'])
+            sar_powers[int(row['Scenario Index'])]['SAR Powers'].setdefault(
+                sar_row_key, {})
+            sar_powers[int(
+                row['Scenario Index'])]['SAR Powers'][sar_row_key][int(
+                    row['Chain'])] = row['Sub-band Powers']
+        return sar_powers
+
+    def read_nvram_sar_data(self):
+        """Reads SAR powers from NVRAM.
+
+        This function reads SAR powers from the NVRAM found on the DUT and
+        generates a dictionary with all programmed TX powers on a per band and
+        regulatory domain basis. The NVRAM file is chosen based on the build,
+        but if no NVRAM file is found matching the expected name, the default
+        NVRAM will be loaded. The choice of NVRAM is not guaranteed to be
+        correct.
+
+        Returns:
+            nvram_sar_data: dict containing all SAR data
+        """
+
+        self._read_sar_config_info()
+        try:
+            hardware_version = self.dut.adb.shell(
+                'getprop ro.boot.hardware.revision')
+            nvram_path = '/vendor/firmware/bcmdhd.cal_{}'.format(
+                hardware_version)
+            nvram = self.dut.adb.shell('cat {}'.format(nvram_path))
+        except:
+            nvram = self.dut.adb.shell('cat /vendor/firmware/bcmdhd.cal')
+        current_context = context.get_current_context().get_full_output_path()
+        file_path = os.path.join(current_context, 'nvram_file')
+        with open(file_path, 'w') as file:
+            file.write(nvram)
+        nvram_sar_data = {}
+        for line in nvram.splitlines():
+            if 'dynsar' in line:
+                sar_config, sar_powers = self._parse_nvram_sar_line(line)
+                nvram_sar_data[sar_config] = sar_powers
+        file_path = os.path.join(current_context, 'nvram_sar_data')
+        with open(file_path, 'w') as file:
+            json.dump(wputils.serialize_dict(nvram_sar_data), file, indent=4)
+
+        return nvram_sar_data
+
+    def _read_sar_config_info(self):
+        """Function to read SAR scenario mapping,
+
+        This function reads sar_config.info file which contains the mapping
+        of SAR scenarios to NVRAM data tables.
+        """
+
+        self.sar_state_mapping = collections.OrderedDict([(-1, {
+            "google_name":
+            'WIFI_POWER_SCENARIO_DISABLE'
+        }), (0, {
+            "google_name": 'WIFI_POWER_SCENARIO_DISABLE'
+        }), (1, {
+            "google_name": 'WIFI_POWER_SCENARIO_ON_HEAD_CELL_OFF'
+        }), (2, {
+            "google_name": 'WIFI_POWER_SCENARIO_ON_HEAD_CELL_ON'
+        }), (3, {
+            "google_name": 'WIFI_POWER_SCENARIO_ON_BODY_CELL_OFF'
+        }), (4, {
+            "google_name": 'WIFI_POWER_SCENARIO_ON_BODY_CELL_ON'
+        }), (5, {
+            "google_name": 'WIFI_POWER_SCENARIO_ON_BODY_BT'
+        }), (6, {
+            "google_name": 'WIFI_POWER_SCENARIO_ON_HEAD_HOTSPOT'
+        }), (7, {
+            "google_name": 'WIFI_POWER_SCENARIO_ON_HEAD_HOTSPOT_MMW'
+        }), (8, {
+            "google_name": 'WIFI_POWER_SCENARIO_ON_BODY_CELL_ON_BT'
+        }), (9, {
+            "google_name": 'WIFI_POWER_SCENARIO_ON_BODY_HOTSPOT'
+        }), (10, {
+            "google_name": 'WIFI_POWER_SCENARIO_ON_BODY_HOTSPOT_BT'
+        }), (11, {
+            "google_name": 'WIFI_POWER_SCENARIO_ON_BODY_HOTSPOT_MMW'
+        }), (12, {
+            "google_name": 'WIFI_POWER_SCENARIO_ON_BODY_HOTSPOT_BT_MMW'
+        })])
+        sar_config_path = '/vendor/firmware/sarconfig.info'
+        sar_config = self.dut.adb.shell(
+            'cat {}'.format(sar_config_path)).splitlines()
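+        # Each line of sarconfig.info is assumed to hold three comma-separated
+        # integers: scenario index, BRCM SAR table index, and a boolean flag
+        # (used as the airplane-mode selector in the NVRAM lookup), which are
+        # decoded below.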
+        sar_config = [line.split(',') for line in sar_config]
+        sar_config = [[int(x) for x in line] for line in sar_config]
+
+        for sar_state in sar_config:
+            self.sar_state_mapping[sar_state[0]]['brcm_index'] = (
+                self.BRCM_SAR_MAPPING[sar_state[1]], bool(sar_state[2]))
+        current_context = context.get_current_context().get_full_output_path()
+        file_path = os.path.join(current_context, 'sarconfig')
+        with open(file_path, 'w') as file:
+            json.dump(wputils.serialize_dict(self.sar_state_mapping),
+                      file,
+                      indent=4)
+
+    def _parse_nvram_sar_line(self, sar_line):
+        """Helper function to decode SAR NVRAM data lines.
+
+        Args:
+            sar_line: single line of text from NVRAM file containing SAR data.
+        Returns:
+            sar_config: sar config referenced in this line
+            decoded_powers: tx powers configured in this line
+        """
+
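+        # Illustrative (hypothetical) example: a line such as
+        #   dynsartbl_5g_grip=0x7070,0x6868
+        # would decode to ('row', 'grip', '5g', 'mimo', False) with powers
+        # [[28.0, 28.0], [26.0, 26.0]] (each hex byte masked with 0x7f and
+        # divided by 4).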
+        sar_config = collections.OrderedDict()
+        list_of_countries = ['fcc', 'jp']
+        try:
+            sar_config['country'] = next(country
+                                         for country in list_of_countries
+                                         if country in sar_line)
+        except:
+            sar_config['country'] = 'row'
+
+        list_of_sar_states = ['grip', 'bt', 'hotspot']
+        try:
+            sar_config['state'] = next(state for state in list_of_sar_states
+                                       if state in sar_line)
+        except:
+            sar_config['state'] = 'head'
+
+        list_of_bands = ['2g', '5g', '6g']
+        sar_config['band'] = next(band for band in list_of_bands
+                                  if band in sar_line)
+
+        sar_config['rsdb'] = 'rsdb' if 'rsdb' in sar_line else 'mimo'
+        sar_config['airplane_mode'] = '_2=' in sar_line
+
+        sar_powers = sar_line.split('=')[1].split(',')
+        decoded_powers = []
+        for sar_power in sar_powers:
+            decoded_powers.append([
+                (int(sar_power[2:4], 16) & int('7f', 16)) / 4,
+                (int(sar_power[4:], 16) & int('7f', 16)) / 4
+            ])
+
+        return tuple(sar_config.values()), decoded_powers
+
+    def get_sar_power_from_nvram(self, testcase_params):
+        """Function to get current expected SAR power from nvram
+
+        This functions gets the expected SAR TX power from the DUT NVRAM data.
+        The SAR power is looked up based on the current channel and regulatory
+        domain,
+
+        Args:
+            testcase_params: dict containing channel, sar state, country code
+        Returns:
+            sar_config: current expected sar config
+            sar_powers: current expected sar powers
+        """
+
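+        # Illustrative lookup (values depend on the DUT's sarconfig): channel
+        # 36 with country code 'US' maps to reg_domain 'fcc', band '5g' and
+        # sub-band index 1, so sar_config might look like
+        # ('fcc', 'grip', '5g', 'mimo', False).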
+        if testcase_params['country_code'] == 'US':
+            reg_domain = 'fcc'
+        elif testcase_params['country_code'] == 'JP':
+            reg_domain = 'jp'
+        else:
+            reg_domain = 'row'
+        for band, channels in self.BAND_TO_CHANNEL_MAP.items():
+            if testcase_params['channel'] in channels:
+                current_band = band[0]
+                sub_band_idx = band[1]
+                break
+        sar_config = (reg_domain, self.sar_state_mapping[
+            testcase_params['sar_state']]['brcm_index'][0], current_band,
+                      'mimo', self.sar_state_mapping[
+                          testcase_params['sar_state']]['brcm_index'][1])
+        sar_powers = self.nvram_sar_data[sar_config][sub_band_idx - 1]
+        return sar_config, sar_powers
+
+    def get_sar_power_from_csv(self, testcase_params):
+        """Function to get current expected SAR power from CSV.
+
+        This functions gets the expected SAR TX power from the DUT NVRAM data.
+        The SAR power is looked up based on the current channel and regulatory
+        domain,
+
+        Args:
+            testcase_params: dict containing channel, sar state, country code
+        Returns:
+            sar_config: current expected sar config
+            sar_powers: current expected sar powers
+        """
+
+        if testcase_params['country_code'] == 'US':
+            reg_domain = 'fcc'
+        elif testcase_params['country_code'] == 'JP':
+            reg_domain = 'jp'
+        else:
+            reg_domain = 'row'
+        for band, channels in self.BAND_TO_CHANNEL_MAP.items():
+            if testcase_params['channel'] in channels:
+                current_band = band[0]
+                sub_band_idx = band[1]
+                break
+        sar_config = (reg_domain, 'mimo', current_band)
+        sar_powers = [
+            self.csv_sar_data[testcase_params['sar_state']]['SAR Powers']
+            [sar_config][0][sub_band_idx - 1],
+            self.csv_sar_data[testcase_params['sar_state']]['SAR Powers']
+            [sar_config][1][sub_band_idx - 1]
+        ]
+        return sar_config, sar_powers
+
+    def process_wl_curpower(self, wl_curpower_file, testcase_params):
+        """Function to parse wl_curpower output.
+
+        Args:
+            wl_curpower_file: path to curpower output file.
+            testcase_params: dict containing channel, sar state, country code
+        Returns:
+            wl_curpower_dict: dict formatted version of curpower data.
+        """
+
+        with open(wl_curpower_file, 'r') as file:
+            wl_curpower_out = file.read()
+
+        channel_regex = re.compile(r'Current Channel:\s+(?P<channel>[0-9]+)')
+        bandwidth_regex = re.compile(
+            r'Channel Width:\s+(?P<bandwidth>\S+)MHz\n')
+
+        channel = int(
+            re.search(channel_regex, wl_curpower_out).group('channel'))
+        bandwidth = int(
+            re.search(bandwidth_regex, wl_curpower_out).group('bandwidth'))
+
+        regulatory_limits = self.generate_regulatory_table(
+            wl_curpower_out, channel, bandwidth)
+        board_limits = self.generate_board_limit_table(wl_curpower_out,
+                                                       channel, bandwidth)
+        wl_curpower_dict = {
+            'channel': channel,
+            'bandwidth': bandwidth,
+            'country': testcase_params['country_code'],
+            'regulatory_limits': regulatory_limits,
+            'board_limits': board_limits
+        }
+        return wl_curpower_dict
+
+    def generate_regulatory_table(self, wl_curpower_out, channel, bw):
+        """"Helper function to generate regulatory limit table from curpower.
+
+        Args:
+            wl_curpower_out: curpower output
+            channel: current channel
+            bw: current bandwidth
+        Returns:
+            regulatory_table: dict with regulatory limits for current config
+        """
+
+        regulatory_group_map = {
+            'DSSS':
+            [('CCK', rate, 1)
+             for rate in ['{}Mbps'.format(mbps) for mbps in [1, 2, 5.5, 11]]],
+            'OFDM_CDD1': [('LEGACY', rate, 1) for rate in [
+                '{}Mbps'.format(mbps)
+                for mbps in [6, 9, 12, 18, 24, 36, 48, 54]
+            ]],
+            'MCS0_7_CDD1':
+            [(mode, rate, 1)
+             for (mode,
+                  rate) in itertools.product(['HT' + str(bw), 'VHT' +
+                                              str(bw)], range(0, 8))],
+            'VHT8_9SS1_CDD1': [('VHT' + str(bw), 8, 1),
+                               ('VHT' + str(bw), 9, 1)],
+            'VHT10_11SS1_CDD1': [('VHT' + str(bw), 10, 1),
+                                 ('VHT' + str(bw), 11, 1)],
+            'MCS8_15':
+            [(mode, rate - 8 * ('VHT' in mode), 2)
+             for (mode,
+                  rate) in itertools.product(['HT' + str(bw), 'VHT' +
+                                              str(bw)], range(8, 16))],
+            'VHT8_9SS2': [('VHT' + str(bw), 8, 2), ('VHT' + str(bw), 9, 2)],
+            'VHT10_11SS2': [('VHT' + str(bw), 10, 2),
+                            ('VHT' + str(bw), 11, 2)],
+            'HE_MCS0-11_CDD1': [('HE' + str(bw), rate, 1)
+                                for rate in range(0, 12)],
+            'HE_MCS0_11SS2': [('HE' + str(bw), rate, 2)
+                              for rate in range(0, 12)],
+        }
+        tx_power_regex = re.compile(
+            r'(?P<mcs>\S+)\s+(?P<chain>[2])\s+(?P<power_1>[0-9.-]+)\s*(?P<power_2>[0-9.-]*)\s*(?P<power_3>[0-9.-]*)\s*(?P<power_4>[0-9.-]*)'
+        )
+
+        regulatory_section_regex = re.compile(
+            r'Regulatory Limits:(?P<regulatory_limits>[\S\s]+)Board Limits:')
+        regulatory_list = re.search(regulatory_section_regex,
+                                    wl_curpower_out).group('regulatory_limits')
+        regulatory_list = re.findall(tx_power_regex, regulatory_list)
+        regulatory_dict = {entry[0]: entry[2:] for entry in regulatory_list}
+
+        bw_index = int(math.log(bw / 10, 2)) - 1
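+        # bw_index selects the column of the power table corresponding to the
+        # current bandwidth: 20 MHz -> 0, 40 MHz -> 1, 80 MHz -> 2, 160 MHz -> 3.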
+        regulatory_table = collections.OrderedDict()
+        for regulatory_group, rates in regulatory_group_map.items():
+            for rate in rates:
+                reg_power = regulatory_dict.get(regulatory_group,
+                                                ['0', '0', '0', '0'])[bw_index]
+                regulatory_table[rate] = float(
+                    reg_power) if reg_power != '-' else 0
+        return regulatory_table
+
+    def generate_board_limit_table(self, wl_curpower_out, channel, bw):
+        """"Helper function to generate board limit table from curpower.
+
+        Args:
+            wl_curpower_out: curpower output
+            channel: current channel
+            bw: current bandwidth
+        Returns:
+            board_limit_table: dict with board limits for current config
+        """
+
+        tx_power_regex = re.compile(
+            r'(?P<mcs>\S+)\s+(?P<chain>[2])\s+(?P<power_1>[0-9.-]+)\s*(?P<power_2>[0-9.-]*)\s*(?P<power_3>[0-9.-]*)\s*(?P<power_4>[0-9.-]*)'
+        )
+
+        board_section_regex = re.compile(
+            r'Board Limits:(?P<board_limits>[\S\s]+)Power Targets:')
+        board_limits_list = re.search(board_section_regex,
+                                      wl_curpower_out).group('board_limits')
+        board_limits_list = re.findall(tx_power_regex, board_limits_list)
+        board_limits_dict = {
+            entry[0]: entry[2:]
+            for entry in board_limits_list
+        }
+
+        mcs_regex_list = [[
+            re.compile('DSSS'),
+            [('CCK', rate, 1)
+             for rate in ['{}Mbps'.format(mbps) for mbps in [1, 2, 5.5, 11]]]
+        ], [re.compile('OFDM(?P<mcs>[0-9]+)_CDD1'), [('LEGACY', '{}Mbps', 1)]],
+                          [
+                              re.compile('MCS(?P<mcs>[0-7])_CDD1'),
+                              [('HT{}'.format(bw), '{}', 1),
+                               ('VHT{}'.format(bw), '{}', 1)]
+                          ],
+                          [
+                              re.compile('VHT(?P<mcs>[8-9])SS1_CDD1'),
+                              [('VHT{}'.format(bw), '{}', 1)]
+                          ],
+                          [
+                              re.compile('VHT10_11SS1_CDD1'),
+                              [('VHT{}'.format(bw), '10', 1),
+                               ('VHT{}'.format(bw), '11', 1)]
+                          ],
+                          [
+                              re.compile('MCS(?P<mcs>[0-9]{2})'),
+                              [('HT{}'.format(bw), '{}', 2)]
+                          ],
+                          [
+                              re.compile('VHT(?P<mcs>[0-9])SS2'),
+                              [('VHT{}'.format(bw), '{}', 2)]
+                          ],
+                          [
+                              re.compile('VHT10_11SS2'),
+                              [('VHT{}'.format(bw), '10', 2),
+                               ('VHT{}'.format(bw), '11', 2)]
+                          ],
+                          [
+                              re.compile('HE_MCS(?P<mcs>[0-9]+)_CDD1'),
+                              [('HE{}'.format(bw), '{}', 1)]
+                          ],
+                          [
+                              re.compile('HE_MCS(?P<mcs>[0-9]+)SS2'),
+                              [('HE{}'.format(bw), '{}', 2)]
+                          ]]
+
+        bw_index = int(math.log(bw / 10, 2)) - 1
+        board_limit_table = collections.OrderedDict()
+        for mcs, board_limit in board_limits_dict.items():
+            for mcs_regex_tuple in mcs_regex_list:
+                mcs_match = re.match(mcs_regex_tuple[0], mcs)
+                if mcs_match:
+                    for possible_mcs in mcs_regex_tuple[1]:
+                        try:
+                            curr_mcs = (possible_mcs[0],
+                                        possible_mcs[1].format(
+                                            mcs_match.group('mcs')),
+                                        possible_mcs[2])
+                        except:
+                            curr_mcs = (possible_mcs[0], possible_mcs[1],
+                                        possible_mcs[2])
+                        board_limit_table[curr_mcs] = float(
+                            board_limit[bw_index]
+                        ) if board_limit[bw_index] != '-' else 0
+                    break
+        return board_limit_table
+
+    def pass_fail_check(self, result):
+        """Function to evaluate if current TX powqe matches CSV/NVRAM settings.
+
+        This function assesses whether the current TX power reported by the
+        DUT matches the powers programmed in NVRAM and CSV after applying the
+        correct TX power backoff used to account for CLPC errors.
+        """
+
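+        # Pass/fail logic implemented below: the expected per-chain power is
+        # min(CSV power, regulatory limit) - 1.5 dB backoff, and the test
+        # passes when both reported chain powers are within 1 dB of that.
+        # Worked example with hypothetical values: CSV power 20 dBm and
+        # regulatory limit 22 dBm give an expected 18.5 dBm, so a reported
+        # 19.2 dBm passes while 20.0 dBm fails.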
+        if isinstance(result['testcase_params']['channel'],
+                      str) and '6g' in result['testcase_params']['channel']:
+            mode = 'HE' + str(result['testcase_params']['bandwidth'])
+        else:
+            mode = 'VHT' + str(result['testcase_params']['bandwidth'])
+        regulatory_power = result['wl_curpower']['regulatory_limits'][(mode, 0,
+                                                                       2)]
+        if result['testcase_params']['sar_state'] == 0:
+            #get from wl_curpower
+            csv_powers = [30, 30]
+            nvram_powers = [30, 30]
+            sar_config = 'SAR DISABLED'
+        else:
+            sar_config, nvram_powers = self.get_sar_power_from_nvram(
+                result['testcase_params'])
+            csv_config, csv_powers = self.get_sar_power_from_csv(
+                result['testcase_params'])
+        self.log.info("SAR state: {} ({})".format(
+            result['testcase_params']['sar_state'],
+            self.sar_state_mapping[result['testcase_params']['sar_state']],
+        ))
+        self.log.info("Country Code: {}".format(
+            result['testcase_params']['country_code']))
+        self.log.info('BRCM SAR Table: {}'.format(sar_config))
+        expected_power = [
+            min([csv_powers[0], regulatory_power]) - 1.5,
+            min([csv_powers[1], regulatory_power]) - 1.5
+        ]
+        power_str = "NVRAM Powers: {}, CSV Powers: {}, Reg Powers: {}, Expected Powers: {}, Reported Powers: {}".format(
+            nvram_powers, csv_powers, [regulatory_power] * 2, expected_power,
+            result['tx_powers'])
+        max_error = max([
+            abs(expected_power[idx] - result['tx_powers'][idx])
+            for idx in [0, 1]
+        ])
+        if max_error > 1:
+            asserts.fail(power_str)
+        else:
+            asserts.explicit_pass(power_str)
+
+    def process_testclass_results(self):
+        pass
+
+    def run_tx_power_test(self, testcase_params):
+        """Main function to test tx power.
+
+        The function sets up the AP & DUT in the correct channel and mode
+        configuration, starts ping traffic and queries the current TX power.
+
+        Args:
+            testcase_params: dict containing all test parameters
+        Returns:
+            test_result: dict containing ping results and other meta data
+        """
+        # Prepare results dict
+        llstats_obj = wputils.LinkLayerStats(
+            self.dut, self.testclass_params.get('llstats_enabled', True))
+        test_result = collections.OrderedDict()
+        test_result['testcase_params'] = testcase_params.copy()
+        test_result['test_name'] = self.current_test_name
+        test_result['ap_config'] = self.access_point.ap_settings.copy()
+        test_result['attenuation'] = testcase_params['atten_range']
+        test_result['fixed_attenuation'] = self.testbed_params[
+            'fixed_attenuation'][str(testcase_params['channel'])]
+        test_result['rssi_results'] = []
+        test_result['ping_results'] = []
+        test_result['llstats'] = []
+        # Setup sniffer
+        if self.testbed_params['sniffer_enable']:
+            self.sniffer.start_capture(
+                testcase_params['test_network'],
+                chan=testcase_params['channel'],
+                bw=testcase_params['bandwidth'],
+                duration=testcase_params['ping_duration'] *
+                len(testcase_params['atten_range']) + self.TEST_TIMEOUT)
+        # Run ping and sweep attenuation as needed
+        self.log.info('Starting ping.')
+        thread_future = wputils.get_ping_stats_nb(self.ping_server,
+                                                  self.dut_ip, 10, 0.02, 64)
+
+        for atten in testcase_params['atten_range']:
+            for attenuator in self.attenuators:
+                attenuator.set_atten(atten, strict=False, retry=True)
+            # Set mcs
+            if isinstance(testcase_params['channel'],
+                          int) and testcase_params['channel'] < 13:
+                self.dut.adb.shell('wl 2g_rate -v 0x2 -b {}'.format(
+                    testcase_params['bandwidth']))
+            elif isinstance(testcase_params['channel'],
+                            int) and testcase_params['channel'] > 13:
+                self.dut.adb.shell('wl 5g_rate -v 0x2 -b {}'.format(
+                    testcase_params['bandwidth']))
+            else:
+                self.dut.adb.shell('wl 6g_rate -e 0 -s 2 -b {}'.format(
+                    testcase_params['bandwidth']))
+            # Set sar state
+            self.dut.adb.shell('halutil -sar enable {}'.format(
+                testcase_params['sar_state']))
+            # Refresh link layer stats
+            llstats_obj.update_stats()
+            # Check sar state
+            self.log.info('Current Country: {}'.format(
+                self.dut.adb.shell('wl country')))
+            # Dump last est power multiple times
+            chain_0_power = []
+            chain_1_power = []
+            for idx in range(30):
+                last_est_out = self.dut.adb.shell(
+                    "wl curpower | grep 'Last est. power'", ignore_status=True)
+                if "Last est. power" in last_est_out:
+                    per_chain_powers = last_est_out.split(
+                        ':')[1].strip().split('  ')
+                    per_chain_powers = [
+                        float(power) for power in per_chain_powers
+                    ]
+                    self.log.info(
+                        'Current Tx Powers = {}'.format(per_chain_powers))
+                    if per_chain_powers[0] > 0:
+                        chain_0_power.append(per_chain_powers[0])
+                    if per_chain_powers[1] > 0:
+                        chain_1_power.append(per_chain_powers[1])
+                time.sleep(0.25)
+            # Check if empty
+            if len(chain_0_power) == 0 or len(chain_1_power) == 0:
+                test_result['tx_powers'] = [0, 0]
+                tx_power_frequency = [100, 100]
+            else:
+                test_result['tx_powers'] = [
+                    scipy.stats.mode(chain_0_power).mode[0],
+                    scipy.stats.mode(chain_1_power).mode[0]
+                ]
+                tx_power_frequency = [
+                    100 * scipy.stats.mode(chain_0_power).count[0] /
+                    len(chain_0_power),
+                    100 * scipy.stats.mode(chain_1_power).count[0] /
+                    len(chain_1_power)
+                ]
+            self.log.info(
+                'Filtered Tx Powers = {}. Frequency = [{:.0f}%, {:.0f}%]'.
+                format(test_result['tx_powers'], tx_power_frequency[0],
+                       tx_power_frequency[1]))
+            llstats_obj.update_stats()
+            curr_llstats = llstats_obj.llstats_incremental.copy()
+            test_result['llstats'].append(curr_llstats)
+            # Dump wl curpower once
+            try:
+                wl_curpower = self.dut.adb.shell('wl curpower')
+            except:
+                time.sleep(0.25)
+                wl_curpower = self.dut.adb.shell('wl curpower',
+                                                 ignore_status=True)
+            current_context = context.get_current_context(
+            ).get_full_output_path()
+            wl_curpower_path = os.path.join(current_context,
+                                            'wl_curpower_output')
+            with open(wl_curpower_path, 'w') as file:
+                file.write(wl_curpower)
+            wl_curpower_dict = self.process_wl_curpower(
+                wl_curpower_path, testcase_params)
+            wl_curpower_path = os.path.join(current_context,
+                                            'wl_curpower_dict')
+            with open(wl_curpower_path, 'w') as file:
+                json.dump(wputils.serialize_dict(wl_curpower_dict),
+                          file,
+                          indent=4)
+            test_result['wl_curpower'] = wl_curpower_dict
+        thread_future.result()
+        if self.testbed_params['sniffer_enable']:
+            self.sniffer.stop_capture()
+        return test_result
+
+    def setup_ap(self, testcase_params):
+        """Sets up the access point in the configuration required by the test.
+
+        Args:
+            testcase_params: dict containing AP and other test params
+        """
+        band = self.access_point.band_lookup_by_channel(
+            testcase_params['channel'])
+        if '6G' in band:
+            frequency = wutils.WifiEnums.channel_6G_to_freq[int(
+                testcase_params['channel'].strip('6g'))]
+        else:
+            if testcase_params['channel'] < 13:
+                frequency = wutils.WifiEnums.channel_2G_to_freq[
+                    testcase_params['channel']]
+            else:
+                frequency = wutils.WifiEnums.channel_5G_to_freq[
+                    testcase_params['channel']]
+        if frequency in wutils.WifiEnums.DFS_5G_FREQUENCIES:
+            self.access_point.set_region(self.testbed_params['DFS_region'])
+        else:
+            self.access_point.set_region(self.testbed_params['default_region'])
+        self.access_point.set_channel(band, testcase_params['channel'])
+        self.access_point.set_bandwidth(band, testcase_params['mode'])
+        if 'low' in testcase_params['ap_power']:
+            self.log.info('Setting low AP power.')
+            self.access_point.set_power(
+                band, self.testclass_params['low_ap_tx_power'])
+        self.log.info('Access Point Configuration: {}'.format(
+            self.access_point.ap_settings))
+
+    def setup_dut(self, testcase_params):
+        """Sets up the DUT in the configuration required by the test.
+
+        Args:
+            testcase_params: dict containing AP and other test params
+        """
+        # Turn screen off to preserve battery
+        if self.testbed_params.get('screen_on',
+                                   False) or self.testclass_params.get(
+                                       'screen_on', False):
+            self.dut.droid.wakeLockAcquireDim()
+        else:
+            self.dut.go_to_sleep()
+        if wputils.validate_network(self.dut,
+                                    testcase_params['test_network']['SSID']):
+            current_country = self.dut.adb.shell('wl country')
+            self.log.info('Current country code: {}'.format(current_country))
+            if testcase_params['country_code'] in current_country:
+                self.log.info('Already connected to desired network')
+                self.dut_ip = self.dut.droid.connectivityGetIPv4Addresses(
+                    'wlan0')[0]
+                return
+        testcase_params['test_network']['channel'] = testcase_params['channel']
+        wutils.wifi_toggle_state(self.dut, False)
+        wutils.set_wifi_country_code(self.dut, testcase_params['country_code'])
+        wutils.wifi_toggle_state(self.dut, True)
+        wutils.reset_wifi(self.dut)
+        if self.testbed_params.get('txbf_off', False):
+            wputils.disable_beamforming(self.dut)
+        wutils.set_wifi_country_code(self.dut, testcase_params['country_code'])
+        wutils.wifi_connect(self.dut,
+                            testcase_params['test_network'],
+                            num_of_tries=1,
+                            check_connectivity=True)
+        self.dut_ip = self.dut.droid.connectivityGetIPv4Addresses('wlan0')[0]
+
+    def setup_tx_power_test(self, testcase_params):
+        """Function that gets devices ready for the test.
+
+        Args:
+            testcase_params: dict containing test-specific parameters
+        """
+        # Configure AP
+        self.setup_ap(testcase_params)
+        # Set attenuator to 0 dB
+        for attenuator in self.attenuators:
+            attenuator.set_atten(0, strict=False, retry=True)
+        # Reset, configure, and connect DUT
+        self.setup_dut(testcase_params)
+
+    def check_skip_conditions(self, testcase_params):
+        """Checks if test should be skipped."""
+        # Check battery level before test
+        if not wputils.health_check(self.dut, 10):
+            asserts.skip('DUT battery level too low.')
+        if (testcase_params['channel'] in wputils.CHANNELS_6GHz
+                and not self.dut.droid.is6GhzBandSupported()):
+            asserts.skip('DUT does not support 6 GHz band.')
+        if not self.access_point.band_lookup_by_channel(
+                testcase_params['channel']):
+            asserts.skip('AP does not support requested channel.')
+
+    def compile_test_params(self, testcase_params):
+        """Function to compile all testcase parameters."""
+
+        self.check_skip_conditions(testcase_params)
+
+        band = self.access_point.band_lookup_by_channel(
+            testcase_params['channel'])
+        testcase_params['test_network'] = self.main_network[band]
+        testcase_params['attenuated_chain'] = -1
+        testcase_params.update(
+            ping_interval=self.testclass_params['ping_interval'],
+            ping_duration=self.testclass_params['ping_duration'],
+            ping_size=self.testclass_params['ping_size'],
+        )
+
+        testcase_params['atten_range'] = [0]
+        return testcase_params
+
+    def _test_ping(self, testcase_params):
+        """ Function that gets called for each range test case
+
+        The function gets called in each range test case. It customizes the
+        range test based on the test name of the test that called it
+
+        Args:
+            testcase_params: dict containing preliminary set of parameters
+        """
+        # Compile test parameters from config and test name
+        testcase_params = self.compile_test_params(testcase_params)
+        # Run TX power test
+        self.setup_tx_power_test(testcase_params)
+        result = self.run_tx_power_test(testcase_params)
+        self.pass_fail_check(result)
+
+    def generate_test_cases(self, ap_power, channels, modes, test_types,
+                            country_codes, sar_states):
+        """Function that auto-generates test cases for a test class."""
+        test_cases = []
+        allowed_configs = {
+            20: [
+                1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 36, 40, 44, 48, 52, 64, 100,
+                116, 132, 140, 149, 153, 157, 161
+            ],
+            40: [36, 44, 100, 149, 157],
+            80: [36, 100, 149],
+            160: [36, '6g37', '6g117', '6g213']
+        }
+
+        for channel, mode, test_type, country_code, sar_state in itertools.product(
+                channels, modes, test_types, country_codes, sar_states):
+            bandwidth = int(''.join([x for x in mode if x.isdigit()]))
+            if channel not in allowed_configs[bandwidth]:
+                continue
+            testcase_name = '{}_ch{}_{}_{}_sar_{}'.format(
+                test_type, channel, mode, country_code, sar_state)
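+            # e.g. 'test_tx_power_ch36_bw80_US_sar_1'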
+            testcase_params = collections.OrderedDict(
+                test_type=test_type,
+                ap_power=ap_power,
+                channel=channel,
+                mode=mode,
+                bandwidth=bandwidth,
+                country_code=country_code,
+                sar_state=sar_state)
+            setattr(self, testcase_name,
+                    partial(self._test_ping, testcase_params))
+            test_cases.append(testcase_name)
+        return test_cases
diff --git a/acts_tests/tests/google/wifi/aware/functional/ProtocolsMultiCountryTest.py b/acts_tests/tests/google/wifi/aware/functional/ProtocolsMultiCountryTest.py
new file mode 100644
index 0000000..f23ecb7
--- /dev/null
+++ b/acts_tests/tests/google/wifi/aware/functional/ProtocolsMultiCountryTest.py
@@ -0,0 +1,231 @@
+#!/usr/bin/python3
+#
+#   Copyright 2020 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+
+import time
+import random
+import re
+import logging
+import acts.controllers.packet_capture as packet_capture
+import acts_contrib.test_utils.wifi.wifi_test_utils as wutils
+
+from acts import asserts
+from acts.test_decorators import test_tracker_info
+from acts_contrib.test_utils.net import nsd_const as nconsts
+from acts_contrib.test_utils.wifi.aware import aware_const as aconsts
+from acts_contrib.test_utils.wifi.aware import aware_test_utils as autils
+from acts_contrib.test_utils.wifi.aware.AwareBaseTest import AwareBaseTest
+from acts.controllers.ap_lib.hostapd_constants import CHANNEL_MAP
+
+WifiEnums = wutils.WifiEnums
+
+class ProtocolsMultiCountryTest(AwareBaseTest):
+    def __init__(self, controllers):
+        AwareBaseTest.__init__(self, controllers)
+        self.basetest_name = (
+            "ping6_ib_unsolicited_passive_multicountry",
+            "ping6_ib_solicited_active_multicountry",
+            )
+
+        self.generate_tests()
+
+    def generate_testcase(self, basetest_name, country):
+        """Generates a single test case from the given data.
+
+        Args:
+            basetest_name: The name of the base test case.
+            country: The information about the country code under test.
+        """
+        base_test = getattr(self, basetest_name)
+        test_tracker_uuid = ""
+
+        testcase_name = 'test_%s_%s' % (basetest_name, country)
+        test_case = test_tracker_info(uuid=test_tracker_uuid)(
+            lambda: base_test(country))
+        setattr(self, testcase_name, test_case)
+        self.tests.append(testcase_name)
+
+    def generate_tests(self):
+        for country in self.user_params['wifi_country_code']:
+            for basetest_name in self.basetest_name:
+                self.generate_testcase(basetest_name, country)
+
+    def setup_class(self):
+        super().setup_class()
+        for ad in self.android_devices:
+            ad.droid.wakeLockAcquireBright()
+            ad.droid.wakeUpNow()
+            wutils.wifi_test_device_init(ad)
+
+        if hasattr(self, 'packet_capture'):
+            self.packet_capture = self.packet_capture[0]
+        self.channel_list_2g = WifiEnums.ALL_2G_FREQUENCIES
+        self.channel_list_5g = WifiEnums.ALL_5G_FREQUENCIES
+
+    def setup_test(self):
+        super(ProtocolsMultiCountryTest, self).setup_test()
+        for ad in self.android_devices:
+            ad.ed.clear_all_events()
+
+    def test_time(self, begin_time):
+        super(ProtocolsMultiCountryTest, self).setup_test()
+        for ad in self.android_devices:
+            ad.cat_adb_log(begin_time)
+
+    def teardown_test(self):
+        super(ProtocolsMultiCountryTest, self).teardown_test()
+        for ad in self.android_devices:
+            ad.adb.shell("cmd wifiaware reset")
+
+
+    """Set of tests for Wi-Fi Aware data-paths: validating protocols running on
+    top of a data-path"""
+
+    SERVICE_NAME = "GoogleTestServiceXY"
+
+    def run_ping6(self, dut, peer_ipv6):
+        """Run a ping6 over the specified device/link
+    Args:
+      dut: Device on which to execute ping6
+      peer_ipv6: Scoped IPv6 address of the peer to ping
+    """
+        cmd = "ping6 -c 3 -W 5 %s" % peer_ipv6
+        results = dut.adb.shell(cmd)
+        self.log.info("cmd='%s' -> '%s'", cmd, results)
+        if results == "":
+            asserts.fail("ping6 empty results - seems like a failure")
+
+    def get_ndp_freq(self, dut):
+        """ get aware interface status"""
+        get_nda0 = "timeout 3 logcat | grep getNdpConfirm | grep Channel"
+        out_nda01 = dut.adb.shell(get_nda0)
+        out_nda0 = re.findall(r"Channel = (\d+)", out_nda01)
+        return out_nda0
+
+
+    def conf_packet_capture(self, band, channel):
+        """Configure packet capture on necessary channels."""
+        freq_to_chan = wutils.WifiEnums.freq_to_channel[int(channel)]
+        logging.info("Capturing packets from "
+                     "frequency:{}, Channel:{}".format(channel, freq_to_chan))
+        result = self.packet_capture.configure_monitor_mode(band, freq_to_chan)
+        if not result:
+            logging.error("Failed to configure channel "
+                          "for {} band".format(band))
+        self.pcap_procs = wutils.start_pcap(
+            self.packet_capture, band, self.test_name)
+        time.sleep(5)
+
+    ########################################################################
+
+    @test_tracker_info(uuid="3b09e666-c526-4879-8180-77d9a55a2833")
+    def ping6_ib_unsolicited_passive_multicountry(self, country):
+        """Validate that ping6 works correctly on an NDP created using Aware
+        discovery with UNSOLICITED/PASSIVE sessions."""
+        p_dut = self.android_devices[0]
+        s_dut = self.android_devices[1]
+        wutils.set_wifi_country_code(p_dut, country)
+        wutils.set_wifi_country_code(s_dut, country)
+        #p_dut.adb.shell("timeout 12 logcat -c")
+        # create NDP
+        (p_req_key, s_req_key, p_aware_if, s_aware_if, p_ipv6,
+         s_ipv6) = autils.create_ib_ndp(
+             p_dut,
+             s_dut,
+             p_config=autils.create_discovery_config(
+                 self.SERVICE_NAME, aconsts.PUBLISH_TYPE_UNSOLICITED),
+             s_config=autils.create_discovery_config(
+                 self.SERVICE_NAME, aconsts.SUBSCRIBE_TYPE_PASSIVE),
+             device_startup_offset=self.device_startup_offset)
+        self.log.info("Interface names: P=%s, S=%s", p_aware_if, s_aware_if)
+        self.log.info("Interface addresses (IPv6): P=%s, S=%s", p_ipv6, s_ipv6)
+        ndpfreg = int(self.get_ndp_freq(p_dut)[-1])
+        ndp_channel = str(CHANNEL_MAP[ndpfreg])
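+        # Channel numbers within the 2.4 GHz channel count map to the 2g band;
+        # anything higher is treated as 5g.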
+        n = int(ndp_channel)
+        if n in range(len(self.channel_list_2g)):
+            ndp_band = '2g'
+        else:
+            ndp_band = '5g'
+        p_dut.log.info('ndp frequency : {}'.format(ndpfreg))
+        p_dut.log.info('ndp channel : {}'.format(ndp_channel))
+        p_dut.log.info('ndp band : {}'.format(ndp_band))
+
+        # start-snifferlog
+        if hasattr(self, 'packet_capture'):
+            self.conf_packet_capture(ndp_band, ndpfreg)
+
+        # run ping6
+        self.run_ping6(p_dut, s_ipv6)
+        self.run_ping6(s_dut, p_ipv6)
+
+        # clean-up
+        p_dut.droid.connectivityUnregisterNetworkCallback(p_req_key)
+        s_dut.droid.connectivityUnregisterNetworkCallback(s_req_key)
+
+        # stop-snifferlog
+        if hasattr(self, 'packet_capture'):
+            wutils.stop_pcap(self.packet_capture, self.pcap_procs, False)
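+        # Allow the NDP teardown to settle before the next test.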
+        time.sleep(10)
+
+    @test_tracker_info(uuid="6b54951f-bf0b-4d26-91d6-c9b3b8452873")
+    def ping6_ib_solicited_active_multicountry(self, country):
+        """Validate that ping6 works correctly on an NDP created using Aware
+        discovery with SOLICITED/ACTIVE sessions."""
+        p_dut = self.android_devices[0]
+        s_dut = self.android_devices[1]
+        wutils.set_wifi_country_code(p_dut, country)
+        wutils.set_wifi_country_code(s_dut, country)
+
+        # create NDP
+        (p_req_key, s_req_key, p_aware_if, s_aware_if, p_ipv6,
+         s_ipv6) = autils.create_ib_ndp(
+             p_dut,
+             s_dut,
+             p_config=autils.create_discovery_config(
+                 self.SERVICE_NAME, aconsts.PUBLISH_TYPE_SOLICITED),
+             s_config=autils.create_discovery_config(
+                 self.SERVICE_NAME, aconsts.SUBSCRIBE_TYPE_ACTIVE),
+             device_startup_offset=self.device_startup_offset)
+        self.log.info("Interface names: P=%s, S=%s", p_aware_if, s_aware_if)
+        self.log.info("Interface addresses (IPv6): P=%s, S=%s", p_ipv6, s_ipv6)
+        ndpfreg = int(self.get_ndp_freq(p_dut)[-1])
+        ndp_channel = str(CHANNEL_MAP[ndpfreg])
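+        # Channel numbers within the 2.4 GHz channel count map to the 2g band;
+        # anything higher is treated as 5g.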
+        n = int(ndp_channel)
+        if n in range(len(self.channel_list_2g)):
+            ndp_band = '2g'
+        else:
+            ndp_band = '5g'
+        p_dut.log.info('ndp frequency : {}'.format(ndpfreg))
+        p_dut.log.info('ndp channel : {}'.format(ndp_channel))
+        p_dut.log.info('ndp band : {}'.format(ndp_band))
+
+        # start-snifferlog
+        if hasattr(self, 'packet_capture'):
+            self.conf_packet_capture(ndp_band, ndpfreg)
+
+        # run ping6
+        self.run_ping6(p_dut, s_ipv6)
+        self.run_ping6(s_dut, p_ipv6)
+
+        # clean-up
+        p_dut.droid.connectivityUnregisterNetworkCallback(p_req_key)
+        s_dut.droid.connectivityUnregisterNetworkCallback(s_req_key)
+
+        # stop-snifferlog
+        if hasattr(self, 'packet_capture'):
+            wutils.stop_pcap(self.packet_capture, self.pcap_procs, False)
+        time.sleep(10)
diff --git a/acts_tests/tests/google/wifi/aware/performance/WifiAwareRvrTest.py b/acts_tests/tests/google/wifi/aware/performance/WifiAwareRvrTest.py
index 2e18bc7..209b081 100644
--- a/acts_tests/tests/google/wifi/aware/performance/WifiAwareRvrTest.py
+++ b/acts_tests/tests/google/wifi/aware/performance/WifiAwareRvrTest.py
@@ -26,6 +26,7 @@
 from acts.controllers import iperf_client as ipc
 from acts.controllers.adb_lib.error import AdbCommandError
 from acts.metrics.loggers.blackbox import BlackboxMappedMetricLogger
+from acts.test_decorators import test_tracker_info
 from acts_contrib.test_utils.wifi import ota_sniffer
 from acts_contrib.test_utils.wifi import wifi_retail_ap as retail_ap
 from acts_contrib.test_utils.wifi import wifi_test_utils as wutils
@@ -119,6 +120,8 @@
             wutils.wifi_toggle_state(ad, True)
 
     def teardown_class(self):
+        for ap in self.access_points:
+            ap.teardown()
         # Turn WiFi OFF
         for dev in self.android_devices:
             wutils.wifi_toggle_state(dev, False)
@@ -142,6 +145,12 @@
             wutils.reset_wifi(ad)
             wputils.stop_wifi_logging(ad)
 
+    def on_exception(self, test_name, begin_time):
+        for ad in self.android_devices:
+            ad.take_bug_report(test_name, begin_time)
+            ad.cat_adb_log(test_name, begin_time)
+            wutils.get_ssrdumps(ad)
+
     def compute_test_metrics(self, rvr_result):
         #Set test metrics
         rvr_result['metrics'] = {}
@@ -271,9 +280,13 @@
         ndp_config = self.android_devices[0].adb.shell(
             'cmd wifiaware native_cb get_channel_info')
         ndp_config = json.loads(ndp_config)
-        ndp_config = ndp_config[list(ndp_config.keys())[0]][0]
-        testcase_params['channel'] = wutils.WifiEnums.freq_to_channel[
-            ndp_config['channelFreq']]
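+        # The reported channel list can be empty; fall back to a default sniffer channel.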
+        ndp_config = ndp_config[list(ndp_config.keys())[0]]
+        if ndp_config:
+            testcase_params['channel'] = wutils.WifiEnums.freq_to_channel[
+                ndp_config[0]['channelFreq']]
+        else:
+            self.log.warning('Unknown NDP channel. Setting sniffer to Ch149')
+            testcase_params['channel'] = 149
         if testcase_params['channel'] < 13:
             testcase_params['mode'] = 'VHT20'
         else:
@@ -423,8 +436,15 @@
                 traffic_type=traffic_type,
                 traffic_direction=traffic_direction,
                 concurrency_state=concurrency_state)
-            setattr(self, test_name, partial(self._test_aware_rvr,
-                                             test_params))
+            test_class = self.__class__.__name__
+            if "uuid_list" in self.user_params:
+                test_tracker_uuid = self.user_params["uuid_list"][
+                    test_class][test_name]
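+                # Bind test_params through a default argument so each generated
+                # case keeps its own parameters rather than the loop's last value.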
+                test_case = test_tracker_info(uuid=test_tracker_uuid)(
+                    lambda params=test_params: self._test_aware_rvr(params))
+            else:
+                test_case = partial(self._test_aware_rvr, test_params)
+            setattr(self, test_name, test_case)
             test_cases.append(test_name)
         return test_cases
 
diff --git a/acts_tests/tests/google/wifi/p2p/performance/WifiP2pRvrTest.py b/acts_tests/tests/google/wifi/p2p/performance/WifiP2pRvrTest.py
index 50c9c0e..4ea5c14 100644
--- a/acts_tests/tests/google/wifi/p2p/performance/WifiP2pRvrTest.py
+++ b/acts_tests/tests/google/wifi/p2p/performance/WifiP2pRvrTest.py
@@ -57,7 +57,9 @@
         common to all tests in this class.
         """
         req_params = ['p2p_rvr_test_params', 'testbed_params']
-        opt_params = ['RetailAccessPoints', 'ap_networks', 'OTASniffer', 'uuid_list']
+        opt_params = [
+            'RetailAccessPoints', 'ap_networks', 'OTASniffer', 'uuid_list'
+        ]
         self.unpack_userparams(req_params, opt_params)
         if hasattr(self, 'RetailAccessPoints'):
             self.access_points = retail_ap.create(self.RetailAccessPoints)
@@ -123,6 +125,8 @@
         ad.droid.wifiP2pSetDeviceName(ad.name)
 
     def teardown_class(self):
+        for ap in self.access_points:
+            ap.teardown()
         # Turn WiFi OFF
         for ad in self.android_devices:
             ad.droid.wifiP2pClose()
@@ -159,6 +163,12 @@
             ad.droid.goToSleepNow()
             wputils.stop_wifi_logging(ad)
 
+    def on_exception(self, test_name, begin_time):
+        for ad in self.android_devices:
+            ad.take_bug_report(test_name, begin_time)
+            ad.cat_adb_log(test_name, begin_time)
+            wutils.get_ssrdumps(ad)
+
     def compute_test_metrics(self, rvr_result):
         #Set test metrics
         rvr_result['metrics'] = {}
@@ -283,9 +293,11 @@
                 False,
                 wpsSetup=wp2putils.WifiP2PEnums.WpsInfo.WIFI_WPS_INFO_PBC)
             if wp2putils.is_go(self.android_devices[0]):
+                self.log.info("DUT 1 is GO.")
                 self.go_dut = self.android_devices[0]
                 self.gc_dut = self.android_devices[1]
             elif wp2putils.is_go(self.android_devices[1]):
+                self.log.info("DUT 2 is GO.")
                 self.go_dut = self.android_devices[1]
                 self.gc_dut = self.android_devices[0]
         except Exception as e:
@@ -485,9 +497,12 @@
                 traffic_type=traffic_type,
                 traffic_direction=traffic_direction,
                 concurrency_state=concurrency_state)
-            test_class=self.__class__.__name__
-            if hasattr(self, "uuid_list") and test_name in self.uuid_list[test_class]:
-                test_case = test_tracker_info(uuid=self.uuid_list[test_class][test_name])(partial(self._test_p2p_rvr, test_params))
+            test_class = self.__class__.__name__
+            if "uuid_list" in self.user_params:
+                test_tracker_uuid = self.user_params["uuid_list"][
+                    test_class][test_name]
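+                # Bind test_params through a default argument so each generated
+                # case keeps its own parameters rather than the loop's last value.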
+                test_case = test_tracker_info(uuid=test_tracker_uuid)(
+                    lambda params=test_params: self._test_p2p_rvr(params))
             else:
                 test_case = partial(self._test_p2p_rvr, test_params)
             setattr(self, test_name, test_case)
diff --git a/acts_tests/tests/google/wifi/rtt/config/wifi_rtt.json b/acts_tests/tests/google/wifi/rtt/config/wifi_rtt.json
index 1870fe9..46adc5a 100644
--- a/acts_tests/tests/google/wifi/rtt/config/wifi_rtt.json
+++ b/acts_tests/tests/google/wifi/rtt/config/wifi_rtt.json
@@ -17,5 +17,6 @@
     "rtt_reference_distance_mm": 100,
     "stress_test_min_iteration_count": 100,
     "stress_test_target_run_time_sec" : 30,
-    "dbs_supported_models" : []
+    "dbs_supported_models" : [],
+    "ranging_role_concurrency_flexible_models" : ["<models which support ranging role change with active ranging session>"]
 }
diff --git a/acts_tests/tests/google/wifi/rtt/functional/AwareDiscoveryWithRangingTest.py b/acts_tests/tests/google/wifi/rtt/functional/AwareDiscoveryWithRangingTest.py
index 5f3f91b..5092b85 100644
--- a/acts_tests/tests/google/wifi/rtt/functional/AwareDiscoveryWithRangingTest.py
+++ b/acts_tests/tests/google/wifi/rtt/functional/AwareDiscoveryWithRangingTest.py
@@ -1688,7 +1688,7 @@
         dd_s_id = dut2.droid.wifiAwareSubscribe(
             dut2_id,
             autils.create_discovery_config(
-                "AA", aconsts.SUBSCRIBE_TYPE_ACTIVE), True)
+                "DD", aconsts.SUBSCRIBE_TYPE_ACTIVE), True)
         autils.wait_for_event(
             dut2,
             autils.decorate_event(aconsts.SESSION_CB_ON_SUBSCRIBE_STARTED,
@@ -1745,7 +1745,16 @@
             dut1,
             autils.decorate_event(aconsts.SESSION_CB_ON_SERVICE_DISCOVERED,
                                   ee_s_id))
-        if self.RANGING_INITIATOR_RESPONDER_CONCURRENCY_LIMITATION:
+        # When a device has a ranging role concurrency limitation, it cannot be an initiator
+        # and a responder at the same time. Two schemes are supported:
+        # 1. Fixed mode: the device's role is set by the first publish/subscribe and is kept
+        #    until the service is terminated.
+        # 2. Flexible mode: the device's role follows the active ranging session; once the
+        #    ranging session for a service ends (even while the publish/subscribe session is
+        #    still active), the role is reassigned based on the next publish/subscribe service.
+        if self.RANGING_INITIATOR_RESPONDER_CONCURRENCY_LIMITATION and \
+                (not hasattr(self, "ranging_role_concurrency_flexible_models") or
+                 dut2.model not in self.ranging_role_concurrency_flexible_models):
             asserts.assert_false(
                 aconsts.SESSION_CB_KEY_DISTANCE_MM in event["data"],
                 "Discovery with ranging for EE NOT expected!")
@@ -1925,4 +1934,4 @@
                 "Way too many discovery events without ranging!")
 
         asserts.explicit_pass(
-            "Discovery/Direct RTT Concurrency Pass", extras={"data": stats})
+            "Discovery/Direct RTT Concurrency Pass", extras={"data": stats})
\ No newline at end of file
diff --git a/acts_tests/tests/google/wifi/wifi6e/WifiApConcurrency6eTest.py b/acts_tests/tests/google/wifi/wifi6e/WifiApConcurrency6eTest.py
new file mode 100644
index 0000000..304c5ac
--- /dev/null
+++ b/acts_tests/tests/google/wifi/wifi6e/WifiApConcurrency6eTest.py
@@ -0,0 +1,89 @@
+#
+#   Copyright 2021 - The Android Open Source Project
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+import time
+from acts import asserts
+from acts.test_decorators import test_tracker_info
+from acts_contrib.test_utils.wifi import wifi_test_utils as wutils
+from acts_contrib.test_utils.wifi.WifiBaseTest import WifiBaseTest
+
+WifiEnums = wutils.WifiEnums
+BRIDGED_AP_LAUNCH_INTERVAL_5_SECONDS = 5
+
+
+class WifiApConcurrency6eTest(WifiBaseTest):
+  """Tests for network selector 6e tests.
+
+  Test Bed Requirement:
+    1 Android device, 2 Asus AXE11000 Access Points.
+  """
+
+  def setup_class(self):
+    super().setup_class()
+
+    self.dut = self.android_devices[0]
+    req_params = ["reference_networks",]
+    self.unpack_userparams(req_param_names=req_params,)
+    self.ap1 = self.reference_networks[0]["6g"]
+
+  def teardown_test(self):
+    super().teardown_test()
+    if self.dut.droid.wifiIsApEnabled():
+      wutils.stop_wifi_tethering(self.dut)
+    for ad in self.android_devices:
+      wutils.reset_wifi(ad)
+
+  @test_tracker_info(uuid="6f776b4a-b080-4b52-a330-52aa641b18f2")
+  def test_ap_concurrency_band_2_and_5_after_connecting_to_6g(self):
+    """Test AP concurrency behavior after connecting to 6g.
+
+    Steps:
+      1. Start softap in 2g and 5g bands.
+      2. Connect to 6g wifi network.
+      3. Verify softap on band 5g turns off.
+    """
+    # Enable bridged AP
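+    # A config saved with both 2G and 2G_5G bands brings up a bridged (dual-instance) AP.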
+    config = wutils.create_softap_config()
+    config[WifiEnums.SECURITY] = WifiEnums.SoftApSecurityType.WPA3_SAE
+    wutils.save_wifi_soft_ap_config(
+        self.dut,
+        config,
+        bands=[
+            WifiEnums.WIFI_CONFIG_SOFTAP_BAND_2G,
+            WifiEnums.WIFI_CONFIG_SOFTAP_BAND_2G_5G
+        ])
+    wutils.start_wifi_tethering_saved_config(self.dut)
+    time.sleep(BRIDGED_AP_LAUNCH_INTERVAL_5_SECONDS)  # wait 5 seconds.
+
+    # Make sure both softap instances (2G and 5G) are enabled.
+    callback_id = self.dut.droid.registerSoftApCallback()
+    infos = wutils.get_current_softap_infos(self.dut, callback_id, True)
+    self.log.info("INFOs: %s" % infos)
+    self.dut.droid.unregisterSoftApCallback(callback_id)
+    asserts.assert_true(
+        len(infos) == 2, "Found %s softap instances. Expected 2." % len(infos))
+
+    # Connect to 6g network.
+    wutils.connect_to_wifi_network(self.dut, self.ap1)
+
+    # Verify 5g softap is turned off.
+    callback_id = self.dut.droid.registerSoftApCallback()
+    infos = wutils.get_current_softap_infos(self.dut, callback_id, True)
+    self.log.info("INFOs: %s" % infos)
+    self.dut.droid.unregisterSoftApCallback(callback_id)
+    asserts.assert_true(
+        len(infos) == 1, "Found %s softap instances. Expected 1." % len(infos))
+    asserts.assert_true(
+        infos[0]["frequency"] < 5000,
+        "Expected the remaining softap instance to be on 2g (5g turned off).")