Snap for 6258215 from c5304302f6fac0c86e69c13d86063ea4010c8662 to r-keystone-qcom-release
Change-Id: Ic4960cf79baccfb08f7257e3d50f49cd8a1628e6
diff --git a/.gitignore b/.gitignore
index 6df12eb..fdc0f52 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,6 +11,7 @@
*.pyc
*.so
*~
+tags
# Anything created as a result of running a test
results.*/
diff --git a/.quickmerge_sentinel b/.quickmerge_sentinel
deleted file mode 100644
index e69de29..0000000
--- a/.quickmerge_sentinel
+++ /dev/null
diff --git a/cli/atest_unittest.py b/cli/atest_unittest.py
index 47641a7..fc0db62 100755
--- a/cli/atest_unittest.py
+++ b/cli/atest_unittest.py
@@ -46,7 +46,7 @@
self._test_help(argv=['atest', 'host'],
out_words_ok=['atest host ',
'[create|delete|list|stat|mod|jobs|'
- 'rename|migrate|skylab_migrate|statjson] [options]'],
+ 'rename|migrate] [options]'],
err_words_ok=[])
@@ -75,7 +75,7 @@
"""Test output when an invalid action is specified."""
self.run_cmd(['atest', 'host', 'bad_action'], exit_code=1,
out_words_ok=['atest host [create|delete|list|stat|'
- 'mod|jobs|rename|migrate|skylab_migrate|statjson] [options]'],
+ 'mod|jobs|rename|migrate] [options]'],
err_words_ok=['Invalid action bad_action'])
diff --git a/cli/fair_partition.py b/cli/fair_partition.py
deleted file mode 100644
index 1a08176..0000000
--- a/cli/fair_partition.py
+++ /dev/null
@@ -1,147 +0,0 @@
-from __future__ import unicode_literals
-from __future__ import print_function
-
-from math import modf
-import random as r
-
-
-def _enumerate_with_random(xs):
- """ List[(key * value)] -> List[(key * value * rand)]
-
- like 'enumerate' but with a third element that can be used as a random
- tiebreaker for fair sorting
-
- @param xs : an iterator of things
-
- @return : an iterator of triples consisting of
- 1) the index into the original iterator
- 2) the element drawn from the iterator
- 3) a number in the range [0,1) chosen uniformly at random
- """
- for (k, v) in enumerate(xs):
- yield (k, v, r.random())
-
-
-def _normalize_entitlement(entitlement):
- """normalize a list of entitlements so it has unit sum
-
- @param entitlement : a list of constants proportional to the share of
- the total that the nth item is entitled to.
-
- @result : same as entitlement, but normalized to sum to 1.
- """
- s = sum(entitlement)
- return tuple(x / float(s) for x in entitlement)
-
-
-def descending_fair_sort_indices(xs):
- """fairly sort an iterator of values in descending order.
-
- each item yielded is an index into xs, ordered from the largest
- corresponding value to the smallest
-
- (4, 5, 6) --> iter([2, 1, 0])
-
- @param xs : an iterator of things
-
- @return : the indices of xs, but with ties resolved fairly
- """
- for idx, _, _ in sorted(
- _enumerate_with_random(xs),
- key=(lambda (_, v, tiebreaker): (v, tiebreaker)),
- reverse=True):
- yield idx
-
-
-def _full_partial_remaining(quota, seats):
- """number of full seats, partial seats, and remaining seats to be filled.
-
- given a list of numbers with unit sum (e.g. [0.5, 0.2, 0.3])
- perform the first step of fairly allocating a non-negative integer
- number of seats between them.
-
- @param quota : a list of numbers with unit sum
-
- @param seats : the number of items to be distributed.
-
- @return : a triple containing three things
- 1) the number of full seats each index is entitled to
- 2) the number of partial seats each index is entitled to
- 3) the number of remaining seats that need to be filled from (2)
- """
- full = []
- partial = []
- must_fill = seats
- for x in quota:
- partial_seat, full_seats = modf(x * seats)
- full.append(full_seats)
- partial.append(partial_seat)
- must_fill -= full_seats
- return full, partial, must_fill
-
-
-def _largest_remainder(entitlement, seats):
- """distribute stuff according to the largest remainder method.
-
- @param entitlement : a not-necessarily-normalized list of numbers
- representing
- how many seats/things each index is entitled to.
-
- @param seats : the number of seats to distribute
-
- @return : a list of integers of the same length as entitlement summing to
- seats. The allocation of seats is intended to be as close as
- possible
- to the original entitlement.
- """
- quota = _normalize_entitlement(entitlement)
- out, rems, remaining = _full_partial_remaining(quota, seats)
- indices = descending_fair_sort_indices(rems)
- for idx in indices:
- if remaining <= 0:
- break
- out[idx] += 1
- remaining -= 1
- assert sum(out) == seats
- return out
-
-
-def _indices(xs):
- """get an iterator of indices over an iterator.
-
- Do not materialize the entire iterator first.
-
- @param xs : an iterator of things
-
- @return : an iterator of indices of the same length as (xs)
- """
- for k, _ in enumerate(xs):
- yield k
-
-
-def partition(xs, ratio):
- """take a list of items and a ratio and return two lists.
-
- The ratio determines which fraction of the items are transferred.
-
- @param xs : a list of things to split between the transfer and retain group.
-
- @param ratio : the ratio of things to transfer.
-
- @return : a list of two things
- 1) the elements of xs that are going to be transferred
- 2) the elements of xs that are going to be retained
- """
- ratios = [ratio, 1.0 - ratio]
- transfer_count, _ = _largest_remainder(ratios, len(xs))
- transfer_count = int(round(transfer_count))
- to_transfer_indices = r.sample(
- population=list(_indices(xs)), k=transfer_count)
- to_transfer = []
- to_retain = []
- for k, v in enumerate(xs):
- if k in to_transfer_indices:
- to_transfer.append(v)
- else:
- to_retain.append(v)
- return to_transfer, to_retain
diff --git a/cli/fair_partition_unittest.py b/cli/fair_partition_unittest.py
deleted file mode 100644
index 6f120f2..0000000
--- a/cli/fair_partition_unittest.py
+++ /dev/null
@@ -1,98 +0,0 @@
-#!/usr/bin/python2
-# pylint: disable-msg=C0111
-#
-# Copyright 2008 Google Inc. All Rights Reserved.
-"""Test for skylab json utils."""
-
-from __future__ import unicode_literals
-from __future__ import print_function
-
-import unittest
-
-import common
-from autotest_lib.cli import fair_partition as f
-
-
-class fair_partition_unittest(unittest.TestCase):
-
- def test_enumerate_with_random_empty(self):
- self.assertEqual(list(f._enumerate_with_random([])), [])
-
- def test_enumerate_with_random(self):
- input = [1, 2, 3]
- output = list(f._enumerate_with_random(input))
- self.assertEqual(output[0][:2], (0, 1))
- self.assertEqual(output[1][:2], (1, 2))
- self.assertEqual(output[2][:2], (2, 3))
-
- def test_normalize_entitlement_trivial(self):
- self.assertEqual(f._normalize_entitlement([]), ())
-
- def test_normalize_entitlement_singleton(self):
- self.assertAlmostEqual(sum(f._normalize_entitlement([74])), 1)
-
- def test_normalize_entitlement(self):
- input = [1 * 47, 3 * 47]
- output = list(f._normalize_entitlement(input))
- self.assertAlmostEqual(sum(output), 1)
- self.assertAlmostEqual(output[0], 0.25)
- self.assertAlmostEqual(output[1], 0.75)
-
- def test_descending_fair_sort_enumerator_trivial(self):
- self.assertEqual(list(f.descending_fair_sort_indices([])), [])
-
- def test_descending_fair_sort_enumerator_singleton(self):
- output = list(f.descending_fair_sort_indices(["a"]))
- self.assertEqual(output[0], 0)
-
- def test_full_partial_remaining_trivial(self):
- self.assertEqual(f._full_partial_remaining([], 0), ([], [], 0))
-
- def test_full_partial_remaining_singleton(self):
- full, partial, remaining = f._full_partial_remaining([1], 7)
- self.assertAlmostEqual(full[0], 7.0)
- self.assertAlmostEqual(partial[0], 0.0)
- self.assertEqual(remaining, 0)
-
- def test_full_partial_remaining(self):
- full, partial, remaining = f._full_partial_remaining(
- [1.0 / 3, 1.0 / 3, 1.0 / 3], 8)
- self.assertAlmostEqual(full[0], 2.0)
- self.assertAlmostEqual(full[1], 2.0)
- self.assertAlmostEqual(full[2], 2.0)
- self.assertAlmostEqual(partial[0], 2.0 / 3)
- self.assertAlmostEqual(partial[1], 2.0 / 3)
- self.assertAlmostEqual(partial[2], 2.0 / 3)
- self.assertAlmostEqual(remaining, 2.0)
-
- def test_largest_remainder__trivial(self):
- self.assertEqual(f._largest_remainder([], 0), [])
-
- def test_largest_remainder_singleton(self):
- self.assertEqual(f._largest_remainder([45], 52), [52])
-
- def test_largest_remainder(self):
- output = sorted(f._largest_remainder([1, 1, 1, 1, 1], 18))
- self.assertEqual(output, [3, 3, 4, 4, 4])
-
- def test_partition_trivial(self):
- self.assertEqual(f.partition([], 0), ([], []))
-
- def test_partition_singleton(self):
- self.assertEqual(f.partition(["a"], 0), ([], ["a"]))
-
- def test_partition(self):
- input = ["a", "b", "c", "d", "e", "f", "g"]
- to_transfer, to_retain = f.partition(input, 0.5)
- to_transfer, to_retain = set(to_transfer), set(to_retain)
- # transfer and retain subsets must be disjoint
- self.assertTrue(to_transfer.isdisjoint(to_retain))
- # every element of input must be in one set or the other
- self.assertEqual(to_transfer.union(to_retain), set(input))
- # one set must have length 4 and the other length 3
- self.assertEqual(min(len(to_transfer), len(to_retain)), 3)
- self.assertEqual(max(len(to_transfer), len(to_retain)), 4)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/cli/host.py b/cli/host.py
index d15b92e..033c2cc 100644
--- a/cli/host.py
+++ b/cli/host.py
@@ -27,12 +27,8 @@
import sys
import time
-from autotest_lib.cli import action_common, rpc, topic_common, skylab_utils, skylab_migration, skylab_migration2
-from autotest_lib.cli import fair_partition
+from autotest_lib.cli import action_common, rpc, topic_common, skylab_utils
from autotest_lib.client.bin import utils as bin_utils
-from autotest_lib.cli.skylab_json_utils import process_labels, print_textpb, write, writeln
-from autotest_lib.cli import skylab_rollback
-from autotest_lib.cli.skylab_json_utils import process_labels, validate_required_fields_for_skylab
from autotest_lib.client.common_lib import error, host_protections
from autotest_lib.server import frontend, hosts
from autotest_lib.server.hosts import host_info
@@ -59,8 +55,8 @@
class host(topic_common.atest):
"""Host class
- atest host [create|delete|list|stat|mod|jobs|rename|migrate|skylab_migrate|statjson] <options>"""
- usage_action = '[create|delete|list|stat|mod|jobs|rename|migrate|skylab_migrate|statjson]'
+ atest host [create|delete|list|stat|mod|jobs|rename|migrate] <options>"""
+ usage_action = '[create|delete|list|stat|mod|jobs|rename|migrate]'
topic = msg_topic = 'host'
msg_items = '<hosts>'
@@ -408,85 +404,6 @@
print json.dumps(results, indent=4, sort_keys=True)
-class host_statjson(host_stat):
- """atest host statjson --mlist <file>|<hosts>
-
- exposes the same information that 'atest host stat' does, but in the json
- format that 'skylab add-dut' expects
- """
-
- usage_action = "statjson"
-
- def __init__(self):
- super(host_statjson, self).__init__()
- self.parser.add_option('--verify',
- default=False,
- help='Verify that required fields are provided',
- action='store_true',
- dest='verify')
- self.parser.add_option('--textpb',
- default=False,
- help='Print in best effort textpb format',
- action='store_true',
- dest='textpb')
-
- def parse(self):
- (options, leftover) = super(host_statjson, self).parse()
- self.verify = options.verify
- self.textpb = options.textpb
- return (options, leftover)
-
- def output(self, results):
- """Print output of 'atest host statjson <...>'"""
- for row in results:
- stats, acls, labels, attributes = row
- # TODO(gregorynisbet): under what circumstances is stats
- # not a list of length 1?
- assert len(stats) == 1
- stats_map = stats[0]
-
- # Stripping the MIGRATED_HOST_SUFFIX makes it possible to
- # migrate a DUT from autotest to skylab even after its hostname
- # has been changed.
- # This enables the steps (renaming the host,
- # copying the inventory information to skylab) to be doable in
- # either order.
- hostname = _remove_hostname_suffix_if_present(
- stats_map["hostname"],
- MIGRATED_HOST_SUFFIX
- )
-
- # TODO(gregorynisbet): clean up servo information
- if "servo_host" not in attributes:
- attributes["servo_host"] = "dummy_host"
- if "servo_port" not in attributes:
- attributes["servo_port"] = "dummy_port"
-
- labels = self._cleanup_labels(labels)
- attrs = [{"key": k, "value": v} for k, v in attributes.iteritems()]
- out_labels = process_labels(labels, platform=stats_map["platform"])
- skylab_json = {
- "common": {
- "attributes": attrs,
- "environment": "ENVIRONMENT_PROD",
- "hostname": hostname,
- "id": ID_AUTOGEN_MESSAGE,
- "labels": out_labels,
- "serialNumber": attributes.get("serial_number", None),
- }
- }
- # if the validate flag is provided, check that a given json blob
- # has all the required fields for skylab.
- if self.verify:
- validate_required_fields_for_skylab(skylab_json)
- if self.textpb:
- # need leading "duts" preamble
- write("duts ")
- print_textpb(skylab_json)
- else:
- print json.dumps(skylab_json, indent=4, sort_keys=True)
-
-
class host_jobs(host):
"""atest host jobs [--max-query] --mlist <file>|<hosts>"""
usage_action = 'jobs'
@@ -1543,405 +1460,3 @@
print('%s' % message)
else:
print('No hosts were migrated.')
-
-
-
-def _host_skylab_migrate_get_hostnames(obj, class_, model=None, pool=None, board=None):
- """
- @params : in 'model', 'pool', 'board'
-
- """
- # TODO(gregorynisbet)
- # this just gets all the hostnames, it doesn't filter by
- # presence or absence of migrated-do-not-use.
- labels = []
- for key, value in ({'model': model, 'board': board, 'pool': pool}).items():
- if value:
- labels.append(key + ":" + value)
- filters = {}
- check_results = {}
- # Copy the filter and check_results initialization logic from
- # the 'execute' method of the class 'host_migrate'.
- if not labels:
- return []
- elif len(labels) == 1:
- filters['labels__name__in'] = labels
- check_results['labels__name__in'] = None
- elif len(labels) > 1:
- filters['multiple_labels'] = labels
- check_results['multiple_labels'] = None
- else:
- assert False
-
- results = super(class_, obj).execute(
- op='get_hosts', filters=filters, check_results=check_results)
- return [result['hostname'] for result in results]
-
-
-
-class host_skylab_migrate(action_common.atest_list, host):
- usage_action = 'skylab_migrate'
-
- def __init__(self):
- super(host_skylab_migrate, self).__init__()
- self.parser.add_option('--dry-run',
- help='Dry run. Show only candidate hosts.',
- action='store_true',
- dest='dry_run')
- self.parser.add_option('--ratio',
- help='ratio of hosts to migrate as number from 0 to 1.',
- type=float,
- dest='ratio',
- default=1)
- self.parser.add_option('--bug-number',
- help='bug number for tracking purposes.',
- dest='bug_number',
- default=None)
- self.parser.add_option('--board',
- help='Board of the hosts to migrate',
- dest='board',
- default=None)
- self.parser.add_option('--model',
- help='Model of the hosts to migrate',
- dest='model',
- default=None)
- self.parser.add_option('--pool',
- help='Pool of the hosts to migrate',
- dest='pool',
- default=None)
- self.parser.add_option('-q',
- '--quick',
- help='use quick-add-duts',
- dest='use_quick_add',
- action='store_true')
- self.parser.add_option('-s',
- '--slow',
- help='don\'t use quick-add-duts',
- dest='no_use_quick_add',
- action='store_true')
- self.parser.add_option('-b',
- '--batch-size',
- help='process n duts at a time',
- dest="batch_size",
- default=None)
-
- def parse(self):
- (options, leftover) = super(host_skylab_migrate, self).parse()
- self.dry_run = options.dry_run
- self.ratio = options.ratio
- self.bug_number = options.bug_number
- self.model = options.model
- self.pool = options.pool
- self.board = options.board
- self._reason = "migration to skylab: %s" % self.bug_number
- use_quick_add = options.use_quick_add
- no_use_quick_add = options.no_use_quick_add
- if use_quick_add:
- if no_use_quick_add:
- self.invalid_syntax('cannot supply both --quick and --slow.')
- else:
- self.use_quick_add = True
- else:
- if no_use_quick_add:
- self.use_quick_add = False
- else:
- self.invalid_syntax('must include either --quick or --slow.')
- self.batch_size = options.batch_size
-
- return (options, leftover)
-
- def _validate_one_hostname_source(self):
- """Validate that hostname source is explicit hostnames or valid query.
-
- Hostnames must either be provided explicitly or be the result of a
- query defined by 'model', 'board', and 'pool'.
-
- @returns : whether the hostnames come from exactly one valid source.
- """
- has_criteria = any([(self.model and self.board), self.board, self.pool])
- has_command_line_hosts = bool(self.hosts)
- if has_criteria != has_command_line_hosts:
- # all good, one data source
- return True
- if has_criteria and has_command_line_hosts:
- self.failure(
- '--model/--board/--pool and explicit hostnames are alternatives. Provide exactly one.',
- item='cli',
- what_failed='user')
- return False
- self.failure(
- 'no explicit hosts and no criteria provided.',
- item='cli',
- what_failed='user')
- return False
-
-
- def execute(self):
- if not self._validate_one_hostname_source():
- return None
- if self.hosts:
- hostnames = self.hosts
- else:
- hostnames = _host_skylab_migrate_get_hostnames(
- obj=self,
- class_=host_skylab_migrate,
- model=self.model,
- board=self.board,
- pool=self.pool,
- )
- if self.dry_run:
- return hostnames
- if not hostnames:
- return {'error': 'no hosts to migrate'}
- res = skylab_migration.migrate(
- ratio=self.ratio,
- reason=self._reason,
- hostnames=hostnames,
- max_duration=10 * 60,
- interval_len=2,
- min_ready_intervals=10,
- immediately=True,
- use_quick_add=self.use_quick_add,
- batch_size=self.batch_size,
- )
- return res
-
-
- def output(self, result):
- if result is not None:
- print json.dumps(result, indent=4, sort_keys=True)
-
-
-class host_skylab_rollback(action_common.atest_list, host):
- usage_action = "skylab_rollback"
-
- def __init__(self):
- super(host_skylab_rollback, self).__init__()
- self.parser.add_option('--bug-number',
- help='bug number for tracking purposes.',
- dest='bug_number',
- default=None)
-
- def parse(self):
- (options, leftover) = super(host_skylab_rollback, self).parse()
- self.bug_number = options.bug_number
- return (options, leftover)
-
- def execute(self):
- if self.hosts:
- hostnames = self.hosts
- else:
- hostnames = _host_skylab_migrate_get_hostnames(
- obj=self,
- class_=host_skylab_migrate,
- model=self.model,
- board=self.board,
- pool=self.pool,
- )
- if not hostnames:
- return {'error': 'no hosts to migrate'}
- res = skylab_rollback.rollback(
- hosts=hostnames,
- bug=self.bug_number,
- dry_run=False,
- )
- return res
-
-
- def output(self, result):
- print result
-
-
-class host_skylab_verify(action_common.atest_list, host):
- usage_action = "skylab_verify"
-
- def __init__(self):
- super(host_skylab_verify, self).__init__()
-
- def parse(self):
- (options, leftover) = super(host_skylab_verify, self).parse()
- self.model = getattr(options, 'model', None)
- self.pool = getattr(options, 'pool', None)
- self.board = getattr(options, 'board', None)
- return (options, leftover)
-
- def execute(self):
- if self.hosts:
- hostnames = self.hosts
- else:
- hostnames = _host_skylab_migrate_get_hostnames(
- obj=self,
- class_=host_skylab_migrate,
- model=self.model,
- board=self.board,
- pool=self.pool,
- )
- if not hostnames:
- return {'error': 'no hosts to migrate'}
- res = skylab_migration.hostname_migrated_status(
- hostnames=hostnames,
- )
- return res
-
-
- def output(self, result):
- json.dump(result, sys.stdout, indent=4)
-
-
-class host_dump_duts(action_common.atest_list, host):
- usage_action = "host_dump_duts"
-
- def __init__(self):
- super(host_dump_duts, self).__init__()
- self.parser.add_option('--output-dir',
- help='directory to dump the board json files',
- dest='output_dir',
- default=None)
-
- def parse(self):
- (options, leftover) = super(host_dump_duts, self).parse()
- self.output_dir = options.output_dir
- self.model = getattr(options, 'model', None)
- self.pool = getattr(options, 'pool', None)
- self.board = getattr(options, 'board', None)
- return (options, leftover)
-
- def execute(self):
- if not hasattr(self, 'output_dir') or not self.output_dir:
- return {'error': "must specify output directory"}
-
- if self.hosts:
- hostnames = self.hosts
- else:
- hostnames = _host_skylab_migrate_get_hostnames(
- obj=self,
- class_=host_dump_duts,
- model=self.model,
- board=self.board,
- pool=self.pool,
- )
-
- good, bad, err = skylab_migration2.write_statjson_hostnames(hostnames=hostnames, outdir=self.output_dir)
-
- if err is not None:
- return {"error": err}
-
- hostname_err_map, err = skylab_migration2.validate_output(self.output_dir)
-
- if err is not None:
- return {"error": err}
-
- invalid_entries = {}
- for hostname in hostname_err_map:
- if hostname_err_map[hostname] is not None:
- invalid_entries[hostname] = hostname_err_map[hostname]
-
- return {
- "not-processed": bad,
- "errors": invalid_entries,
- }
-
-
-
- def output(self, result):
- json.dump(result, sys.stdout, indent=4)
-
-
-class host_read_dump(action_common.atest_list, host):
- usage_action = "host_read_dump"
-
- def __init__(self):
- super(host_read_dump, self).__init__()
- self.parser.add_option('--output-dir',
- help='directory to read json files from',
- dest='output_dir',
- default=None)
-
- def parse(self):
- (options, leftover) = super(host_read_dump, self).parse()
- self.output_dir = options.output_dir
- self.model = getattr(options, 'model', None)
- self.pool = getattr(options, 'pool', None)
- self.board = getattr(options, 'board', None)
- return (options, leftover)
-
- def execute(self):
- if not hasattr(self, 'output_dir') or not self.output_dir:
- return {'error': "must specify output directory"}
-
- json_obj, err = skylab_migration2.assemble_output_dir(output_dir=self.output_dir)
-
- if err is not None:
- return {"error": err}
- else:
- return json_obj
-
- def output(self, result):
- json.dump(result, sys.stdout, indent=4)
-
-
-class host_do_quick_add(action_common.atest_list, host):
- usage_action = "do_quick_add"
-
- def __init__(self):
- super(host_do_quick_add, self).__init__()
- self.parser.add_option('--data',
- help='directory to read json files from',
- dest='data',
- default=None)
-
- def parse(self):
- (options, leftover) = super(host_do_quick_add, self).parse()
- self.data_dir = options.data
- self.model = getattr(options, 'model', None)
- self.pool = getattr(options, 'pool', None)
- self.board = getattr(options, 'board', None)
- return (options, leftover)
-
- def execute(self):
- if self.hosts:
- hostnames = self.hosts
- else:
- hostnames = _host_skylab_migrate_get_hostnames(
- obj=self,
- class_=host_do_quick_add,
- model=self.model,
- board=self.board,
- pool=self.pool,
- )
- if not hostnames:
- return {'error': 'no hosts to add'}
- error_msg = skylab_migration2.do_quick_add_duts(hostnames=hostnames, dirpath=self.data_dir)
- return {'error' : error_msg}
-
- def output(self, result):
- json.dump(result, sys.stdout, indent=4)
-
-
-class host_lock_rename(action_common.atest_list, host):
- usage_action = "lock_rename"
-
- def __init__(self):
- super(host_lock_rename, self).__init__()
-
- def parse(self):
- (options, leftover) = super(host_lock_rename, self).parse()
- self.model = getattr(options, 'model', None)
- self.pool = getattr(options, 'pool', None)
- self.board = getattr(options, 'board', None)
- return (options, leftover)
-
- def execute(self):
- if self.hosts:
- hostnames = self.hosts
- else:
- hostnames = _host_skylab_migrate_get_hostnames(
- obj=self,
- class_=host_lock_rename,
- model=self.model,
- board=self.board,
- pool=self.pool,
- )
- if not hostnames:
- return {'error': 'no hostnames'}
- skylab_migration2.atest_lock_rename(hostnames=hostnames)
diff --git a/cli/skylab_json_utils.py b/cli/skylab_json_utils.py
deleted file mode 100644
index d269409..0000000
--- a/cli/skylab_json_utils.py
+++ /dev/null
@@ -1,449 +0,0 @@
-from __future__ import unicode_literals
-from __future__ import print_function
-import sys
-import json
-import uuid
-
-# Source of truth is DUTPool enum at
-# https://cs.chromium.org/chromium/infra/go/src/infra/libs/skylab/inventory/device.proto
-MANAGED_POOLS = {
- "cq": "DUT_POOL_CQ",
- # TODO(gregorynisbet): BVT is obsolete, send stuff to QUOTA instead
- "bvt": "DUT_POOL_QUOTA",
- "suites": "DUT_POOL_SUITES",
- "cts": "DUT_POOL_CTS",
- "cts-perbuild": "DUT_POOL_CTS_PERBUILD",
- "continuous": "DUT_POOL_CONTINUOUS",
- "arc-presubmit": "DUT_POOL_ARC_PRESUBMIT",
- "quota": "DUT_POOL_QUOTA",
-}
-
-
-VIDEO_ACCELERATION_WHITELIST = {
- "VIDEO_ACCELERATION_H264",
- "VIDEO_ACCELERATION_ENC_H264",
- "VIDEO_ACCELERATION_VP8",
- "VIDEO_ACCELERATION_ENC_VP8",
- "VIDEO_ACCELERATION_VP9",
- "VIDEO_ACCELERATION_ENC_VP9",
- "VIDEO_ACCELERATION_VP9_2",
- "VIDEO_ACCELERATION_ENC_VP9_2",
- "VIDEO_ACCELERATION_H265",
- "VIDEO_ACCELERATION_ENC_H265",
- "VIDEO_ACCELERATION_MJPG",
- "VIDEO_ACCELERATION_ENC_MJPG",
-}
-
-
-PHASE_WHITELIST = {
- "PHASE_INVALID",
- "PHASE_EVT",
- "PHASE_EVT2",
- "PHASE_DVT",
- "PHASE_DVT2",
- "PHASE_PVT",
- "PHASE_PVT2",
- "PHASE_PVT3",
- "PHASE_MP",
-}
-
-
-CR50_PHASE_WHITELIST = {
- "CR50_PHASE_INVALID",
- "CR50_PHASE_PREPVT",
- "CR50_PHASE_PVT",
-}
-
-
-def _normalize_pools(l):
- """take in the list of pools and distribute them between criticalPools and
- self_serve_pools"""
- pools = l.get_all_strings("pool")
- out = {"criticalPools": [], "self_serve_pools": []}
- for pool in pools:
- if pool in MANAGED_POOLS:
- # convert name to prototype enum for skylab-managed pools
- out["criticalPools"].append(MANAGED_POOLS[pool])
- else:
- # for unmanaged pools preserve the name
- out["self_serve_pools"].append(pool)
- #TODO(gregorynisbet): reject empty pools too.
- if len(out["criticalPools"]) > 1:
- sys.stderr.write("multiple critical pools %s\n" % pools)
- out["criticalPools"] = ["DUT_POOL_SUITES"]
- return out
-
-
-def _get_chameleon(l):
- out = l.get_enum("chameleon", prefix="CHAMELEON_TYPE_")
- # send CHAMELEON_TYPE_['HDMI'] -> CHAMELEON_TYPE_HDMI
- out = "".join(ch for ch in out if ch not in "[']")
- if out == "CHAMELEON_TYPE_INVALID":
- return None
- if out == "CHAMELEON_TYPE_":
- return None
- good_val = False
- for ch in "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMOPQRSTUVWXYZ0123456789":
- if out.startswith("CHAMELEON_TYPE_" + ch):
- good_val = True
- if good_val:
- return out
- else:
- return None
-
-
-EC_TYPE_ATEST_TO_SK = {
- "cros": "EC_TYPE_CHROME_OS",
-}
-
-REQUIRED_LABELS = ["board", "model", "sku", "brand"]
-
-
-class SkylabMissingLabelException(Exception):
- pass
-
-
-class Labels(object):
- """a queryable interface to labels taken from autotest"""
-
- def __init__(self, labels=None):
- self.bools = set()
- self.strings = {}
- if isinstance(labels, Labels):
- self.bools.update(labels.bools)
- self.strings.update(labels.strings)
- elif labels:
- for label in labels:
- self._add_label(label["name"])
-
- def __len__(self):
- return len(self.bools) + len(self.strings)
-
- def __eq__(self, other):
- return self.bools == other.bools and self.strings == other.strings
-
- def _add_label(self, name):
- """add a label with a name of the autotest form:
-
- key or key:value.
- """
- key, sep, value = name.partition(":")
- if sep:
- self.strings.setdefault(key, [])
- self.strings[key].append(value)
- else:
- self.bools.add(key)
-
- def get_bool(self, x):
- return x in self.bools
-
- def get_string(self, x, default=None):
- item = self.strings.get(x, [])
- # TODO(gregorynisbet) -- what should we actually do if there's more than
- # one value associated with the same key?
- if item:
- return item[0]
- else:
- return default
-
- def get_all_strings(self, x, default=None):
- return self.strings.get(x, [])
-
- def get_enum(self, x, default=None, prefix=None):
- if default is None:
- default = "INVALID"
- raw = self.get_string(x, default=default)
- return prefix + raw.upper()
-
- def get_enum_or_none(self, x, prefix=None):
- assert prefix.endswith("_")
- raw = self.get_string(x, default=None)
- if raw is None:
- return None
- else:
- return prefix + raw.upper()
-
- def bool_keys_starting_with(self, prefix):
- """get the boolean keys beginning with a certain prefix.
-
- Takes time proportional to the number of boolean keys.
- """
- for x in self.bools:
- if x.startswith(prefix):
- yield x
-
-def _cr50_phase(l):
- inferred_cr50_phase = l.get_enum("cr50", prefix="CR50_PHASE_")
- if inferred_cr50_phase in CR50_PHASE_WHITELIST:
- return inferred_cr50_phase
- else:
- return "CR50_PHASE_INVALID"
-
-def _conductive(l):
- out = l.get_string("conductive")
- if out is None:
- return False
- if out in ("False", "false", 0, None, "0", "None", "no", "Flase"):
- return False
- else:
- return True
-
-def _cts_abi(l):
- """The ABI has the structure cts_abi_x86 and cts_abi_arm
-
- instead of the expected cts_abi:x86 and cts_abi:arm
- """
- out = []
- for abi in ["cts_abi_x86", "cts_abi_arm"]:
- if l.get_bool(abi):
- out.append(abi.upper())
- return out
-
-
-def _cts_cpu(l):
- out = []
- for abi in ["cts_cpu_x86", "cts_cpu_arm"]:
- if l.get_bool(abi):
- out.append(abi.upper())
- return out
-
-
-def _os_type(l):
- """Get the operating system type"""
- return l.get_enum("os", prefix="OS_TYPE_")
-
-def _ec_type(l):
- """Get the ec type."""
- name = l.get_string("ec")
- return EC_TYPE_ATEST_TO_SK.get(name, "EC_TYPE_INVALID")
-
-
-def _video_acceleration(l):
- """produce a list of enums corresponding
-
- to the video_acc_ keys in the atest format
- """
- out = []
- for prefix in ["video_acc", "hw_video_acc"]:
- for key in l.bool_keys_starting_with(prefix=prefix):
- _, delim, suffix = key.rpartition("video_acc_")
- assert delim == "video_acc_"
- new_label = "VIDEO_ACCELERATION" + "_" + suffix.upper()
- if new_label in VIDEO_ACCELERATION_WHITELIST:
- out.append(new_label)
- return out
-
-
-def _platform(l):
- return l.get_string("platform") or l.get_string("Platform")
-
-
-def _phase(l):
- inferred_phase = l.get_enum("phase", prefix="PHASE_")
- if inferred_phase in PHASE_WHITELIST:
- return inferred_phase
- else:
- return "PHASE_INVALID"
-
-
-
-def validate_required_fields_for_skylab(skylab_fields):
- """Does 'skylab_fields' have all required fields to add a DUT?
-
- Throw a SkylabMissingLabelException if any mandatory field is not present
-
- @param skylab_fields : a DUT description to be handed to 'skylab add-dut'
- @returns: Nothing
- """
- try:
- labels = skylab_fields["common"]["labels"]
- except (KeyError, TypeError, ValueError):
- raise ValueError(
- 'skylab_fields["common"]["labels"] = { ... } is not present')
- for label in REQUIRED_LABELS:
- if label not in labels or labels[label] is None:
- raise SkylabMissingLabelException(label)
- return
-
-
-def process_labels(labels, platform):
- """produce a JSON object of the kind accepted by skylab add-dut
-
- for the labels from autotest
- """
- l = Labels(labels)
-
- pools = _normalize_pools(l)
-
- # The enum-type keys below default to None
- # except for 'telephony' and 'modem', which default to ''
- # This is intentional.
- # This function will always return a json-like Python data object,
- # even in cases where some normally required fields are missing.
- # The explicit None is there as an explicit placeholder.
- out = {
- # boolean keys in label
- "arc": l.get_bool("arc"),
- # string keys in label
- "board": l.get_string("board", default=None),
- "brand": l.get_string("brand-code", default=None),
- "cr50Phase": _cr50_phase(l),
- "hwidSku": l.get_string("sku", default=None),
- "model": l.get_string("model", default=None),
- "platform": platform,
- "referenceDesign": l.get_string("reference_design"),
- # NOTE: the autotest label corresponding to "sku" is
- # "device-sku", not "sku"
- "sku": l.get_string("device-sku", default=None),
- # enum keys
- "ecType": _ec_type(l),
- "osType": _os_type(l),
- "phase": _phase(l),
- # list of enum keys
- "criticalPools": pools["criticalPools"],
- "ctsAbi": _cts_abi(l),
- "ctsCpu": _cts_cpu(l),
- # list of string keys
- "self_serve_pools": pools["self_serve_pools"],
- # capabilities substructure
- "capabilities": {
- # boolean keys in capabilities
- "atrus": l.get_bool("atrus"),
- "bluetooth": l.get_bool("bluetooth"),
- "detachablebase": l.get_bool("detachablebase"),
- "flashrom": l.get_bool("flashrom"),
- "hotwording": l.get_bool("hotwording"),
- "internalDisplay": l.get_bool("internal_display"),
- "lucidsleep": l.get_bool("lucidsleep"),
- "touchpad": l.get_bool("touchpad"),
- "webcam": l.get_bool("webcam"),
- # string keys in capabilities
- "graphics": l.get_string("graphics", default=None),
- "gpuFamily": l.get_string("gpu_family", default=None),
- "modem": l.get_string("modem", default=""),
- "power": l.get_string("power", default=None),
- "storage": l.get_string("storage", default=None),
- "telephony": l.get_string("telephony", default=""),
- # enum keys in capabilities
- "carrier": l.get_enum("carrier", prefix="CARRIER_"),
- # video acceleration is its own thing.
- "videoAcceleration": _video_acceleration(l),
- },
- # peripherals substructure
- "peripherals": {
- "audioBoard": l.get_bool("audio_board"),
- "audioBox": l.get_bool("audio_box"),
- "audioLoopbackDongle": l.get_bool("audio_loopback_dongle"),
- "chameleon": l.get_bool("chameleon"),
- "chameleonType": _get_chameleon(l),
- "conductive": _conductive(l),
- "huddly": l.get_bool("huddly"),
- "mimo": l.get_bool("mimo"),
- "servo": l.get_bool("servo"),
- "stylus": l.get_bool("stylus"),
- "wificell": l.get_bool("wificell"),
- },
- # test hints substructure
- "testCoverageHints": {
- "chaosDut": l.get_bool("chaos_dut"),
- "chromesign": l.get_bool("chromesign"),
- "hangoutApp": l.get_bool("hangout_app"),
- "meetApp": l.get_bool("meet_app"),
- "recoveryTest": l.get_bool("recovery_test"),
- "testAudiojack": l.get_bool("test_audio_jack"),
- "testHdmiaudio": l.get_bool("test_hdmiaudio"),
- "testUsbprinting": l.get_bool("test_usbprinting"),
- "usbDetect": l.get_bool("usb_detect"),
- },
- }
-
- if not out["criticalPools"]:
- del out["criticalPools"]
-
- if not out["self_serve_pools"]:
- del out["self_serve_pools"]
-
- return out
-
-
-
-# accepts: string possibly in camelCase
-# returns: string in snake_case
-def to_snake_case(str):
- out = []
- for i, x in enumerate(str):
- if i == 0:
- out.append(x.lower())
- continue
- if x.isupper():
- out.append("_")
- out.append(x.lower())
- else:
- out.append(x.lower())
- return "".join(out)
-
-
-def write(*args, **kwargs):
- print(*args, sep="", end="", **kwargs)
-
-def writeln(*args, **kwargs):
- print(*args, sep="", end="\n", **kwargs)
-
-
-# accepts: key, value, indentation level
-# returns: nothing
-# emits: textual protobuf format, best effort
-def print_textpb_keyval(key, val, level=0):
- # repeated field, repeat the key in every stanza
- if isinstance(val, (list, tuple)):
- for x in val:
- # TODO(gregorynisbet): nested lists?
- print_textpb_keyval(to_snake_case(key), x, level=level)
- # if the value is a dictionary, don't print :
- elif isinstance(val, dict):
- write((level * " "), to_snake_case(key), " ")
- print_textpb(val, level=level)
- else:
- write((level * " "), to_snake_case(key), ":", " ")
- print_textpb(val, level=0)
-
-
-
-
-
-# accepts: obj, indentation level
-# returns: nothing
-# emits: textual protobuf format, best effort
-def print_textpb(obj, level=0):
- # not sure what we want for None
- # an empty string seems like a good choice
- if obj is None:
- writeln((level * " "), '""')
- elif isinstance(obj, (bytes, unicode)) and obj.startswith("[IGNORED]"):
- writeln((level * " "), json.dumps(str(uuid.uuid4())))
- elif isinstance(obj, (int, long, float, bool)):
- writeln((level * " "), json.dumps(obj))
- elif isinstance(obj, (bytes, unicode)):
- # guess that something is not an enum if it
- # contains at least one lowercase letter or a space
- # or does not contain an underscore
- is_enum = True
- for ch in obj:
- if ch.islower() or ch == " ":
- is_enum = False
- break
- # check for the underscore
- is_enum = is_enum and "_" in obj
- if is_enum:
- writeln((level * " "), obj)
- else:
- writeln((level * " "), json.dumps(obj))
- elif isinstance(obj, dict):
- writeln("{")
- for key in sorted(obj):
- print_textpb_keyval(key=key, val=obj[key], level=(2 + level))
- writeln((level * " "), "}")
- elif isinstance(obj, (list, tuple)):
- raise RuntimeError("No sequences on toplevel")
- else:
- raise RuntimeError("Unsupported type (%s)" % type(obj))
diff --git a/cli/skylab_json_utils_unittest.py b/cli/skylab_json_utils_unittest.py
deleted file mode 100755
index acdf152..0000000
--- a/cli/skylab_json_utils_unittest.py
+++ /dev/null
@@ -1,715 +0,0 @@
-#!/usr/bin/python2
-# pylint: disable-msg=C0111
-#
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file
-"""Test for skylab json utils."""
-
-from __future__ import unicode_literals
-
-import unittest
-
-import common
-#TODO(gregorynisbet): remove renamed import of skylab_json_utils
-from autotest_lib.cli import skylab_json_utils as sky
-from autotest_lib.cli import skylab_json_utils
-
-basic_labels = sky.Labels()
-basic_labels._add_label("key1:value1")
-basic_labels._add_label("key2")
-basic_labels._add_label("key4")
-basic_labels._add_label("key6")
-
-
-class skylab_json_utils_unittest(unittest.TestCase):
- def test_label_empty(self):
- self.assertFalse(sky.Labels().bools)
- self.assertFalse(sky.Labels().strings)
- self.assertFalse(sky.Labels())
-
- def test_label_copy(self):
- basic_labels2 = sky.Labels(basic_labels)
- self.assertEqual(basic_labels, basic_labels2)
-
- def test_bool_label_present(self):
- self.assertTrue(basic_labels.get_bool("key2"))
-
- def test_bool_label_absent(self):
- self.assertFalse(basic_labels.get_bool("nonexistent-key"))
-
- def test_string_label_present(self):
- self.assertEqual(basic_labels.get_string("key1"), "value1")
-
- def test_string_label_absent(self):
- self.assertIsNone(basic_labels.get_string("nonexistent-key"))
-
- def test_enum_label_present(self):
- """the value in a key:value pair into a string that resembles a
-
- protobuf constant.
-
- The skylab add-dut JSON API expects certain fields which are
- protobuf enums to be strings of this form.
- """
- self.assertEqual(
- basic_labels.get_enum("key1", prefix="PREFIX_"), "PREFIX_VALUE1")
-
- def test_enum_label_absent(self):
- """by convention, many of the 'zero values' protobuf constants
-
- are named TYPE_INVALID.
-
- e.g. 'CARRIER_INVALID'
- """
- self.assertEqual(
- basic_labels.get_enum("nonexistent-key", prefix="THING_"),
- "THING_INVALID")
-
- def test_bool_keys_starting_with(self):
- self.assertEqual(
- set(basic_labels.bool_keys_starting_with("k")),
- {"key2", "key4", "key6"})
-
- def test_arc_present(self):
- l = sky.Labels()
- l._add_label("arc")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["arc"], True)
-
- def test_arc_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["arc"], False)
-
- def test_board_present(self):
- l = sky.Labels()
- l._add_label("board:nami")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["board"], "nami")
-
- def test_board_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["board"], None)
-
- def test_cr50phase_present(self):
- l = sky.Labels()
- l._add_label("cr50:0.3.18")
- out = sky.process_labels(l, platform=None)
- # TODO(gregorynisbet): note! strictly speaking this is wrong,
- # but skylab does not support version numbers in the CR50_PHASE
- self.assertEqual(out["cr50Phase"], "CR50_PHASE_INVALID")
-
- def test_cr50phase_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["cr50Phase"], "CR50_PHASE_INVALID")
-
- def test_model_present(self):
- l = sky.Labels()
- l._add_label("model:syndra")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["model"], "syndra")
-
- def test_model_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["model"], None)
-
- def test_platform(self):
- l = None
- out = sky.process_labels(None, platform=47)
- self.assertEqual(out["platform"], 47)
-
- def test_reference_design_present(self):
- l = sky.Labels()
- l._add_label("reference_design:Google_Nami")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["referenceDesign"], "Google_Nami")
-
- def test_reference_design_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["referenceDesign"], None)
-
- def test_ec_present(self):
- l = sky.Labels()
- l._add_label("ec:cros")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["ecType"], "EC_TYPE_CHROME_OS")
-
- def test_ec_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["ecType"], "EC_TYPE_INVALID")
-
- def test_os_present(self):
- l = sky.Labels()
- l._add_label("os:cros")
- out = sky.process_labels(l, platform=None)
- # NOTE: the type is OS_TYPE_CROS not OS_TYPE_CHROME_OS
- self.assertEqual(out["osType"], "OS_TYPE_CROS")
-
- def test_os_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["osType"], "OS_TYPE_INVALID")
-
- def test_critical_pool_present(self):
- l = sky.Labels()
- # note: use suites rather than another pool because
- # suites will always be mapped to DUT_POOL_SUITES
- l._add_label("pool:suites")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["criticalPools"], ["DUT_POOL_SUITES"])
-
- def test_critical_pool_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertFalse("criticalPools" in out)
-
- def test_hwid_sku_present(self):
- l = sky.Labels()
- l._add_label("sku:TEST")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["hwidSku"], "TEST")
-
- def test_hwid_sku_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["hwidSku"], None)
-
- def test_cts_abi_present(self):
- l = sky.Labels()
- l._add_label("cts_abi_arm")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["ctsAbi"], ["CTS_ABI_ARM"])
-
- def test_cts_abi_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["ctsAbi"], [])
-
- def test_cts_cpu_present(self):
- l = sky.Labels()
- l._add_label("cts_cpu_arm")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["ctsCpu"], ["CTS_CPU_ARM"])
-
- def test_cts_cpu_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["ctsCpu"], [])
-
- def test_atrus_present(self):
- l = sky.Labels()
- l._add_label("atrus")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["atrus"], True)
-
- def test_atrus_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["atrus"], False)
-
- def test_bluetooth_present(self):
- l = sky.Labels()
- l._add_label("bluetooth")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["bluetooth"], True)
-
- def test_bluetooth_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["bluetooth"], False)
-
- def test_detachablebase_present(self):
- l = sky.Labels()
- l._add_label("detachablebase")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["detachablebase"], True)
-
- def test_detachablebase_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["detachablebase"], False)
-
- def test_flashrom_present(self):
- l = sky.Labels()
- l._add_label("flashrom")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["flashrom"], True)
-
- def test_flashrom_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["flashrom"], False)
-
- def test_hotwording_present(self):
- l = sky.Labels()
- l._add_label("hotwording")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["hotwording"], True)
-
- def test_hotwording_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["hotwording"], False)
-
- def test_internal_display_present(self):
- l = sky.Labels()
- l._add_label("internal_display")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["internalDisplay"], True)
-
- def test_internal_display_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["internalDisplay"], False)
-
- def test_lucidsleep_present(self):
- l = sky.Labels()
- l._add_label("lucidsleep")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["lucidsleep"], True)
-
- def test_lucidsleep_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["lucidsleep"], False)
-
- def test_touchpad_present(self):
- l = sky.Labels()
- l._add_label("touchpad")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["touchpad"], True)
-
- def test_touchpad_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["touchpad"], False)
-
- def test_webcam_present(self):
- l = sky.Labels()
- l._add_label("webcam")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["webcam"], True)
-
- def test_webcam_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["webcam"], False)
-
- def test_graphics_present(self):
- l = sky.Labels()
- l._add_label("graphics:graphicsval")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["graphics"], "graphicsval")
-
- def test_graphics_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["graphics"], None)
-
- def test_gpu_family(self):
- l = sky.Labels()
- l._add_label("gpu_family:gpu_family_val")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["gpuFamily"], "gpu_family_val")
-
- def test_gpu_family_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["gpuFamily"], None)
-
- def test_modem_present(self):
- l = sky.Labels()
- l._add_label("modem:gobi2k")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["modem"], "gobi2k")
-
- def test_modem_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["modem"], "")
-
- def test_power_present(self):
- l = sky.Labels()
- l._add_label("power:battery")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["power"], "battery")
-
- def test_power_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["power"], None)
-
- def test_storage_present(self):
- l = sky.Labels()
- l._add_label("storage:nmve")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["storage"], "nmve")
-
- def test_storage_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertIsNone(out["capabilities"]["storage"])
-
- def test_telephony_present(self):
- l = sky.Labels()
- l._add_label("telephony:volte")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["telephony"], "volte")
-
- def test_telephony_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["telephony"], "")
-
- def test_carrier_present(self):
- l = sky.Labels()
- l._add_label("carrier:att")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["carrier"], "CARRIER_ATT")
-
- def test_carrier_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["carrier"], "CARRIER_INVALID")
-
- def test_video_accleration_present(self):
- l = sky.Labels()
- l._add_label("hw_video_acc_enc_vp9")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["videoAcceleration"], ["VIDEO_ACCELERATION_ENC_VP9"])
-
- def test_video_accleration_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["capabilities"]["videoAcceleration"], [])
-
- def test_audio_board_present(self):
- l = sky.Labels()
- l._add_label("audio_board")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["peripherals"]["audioBoard"], True)
-
- def test_audio_board_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["peripherals"]["audioBoard"], False)
-
- def test_audio_box_present(self):
- l = sky.Labels()
- l._add_label("audio_box")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["peripherals"]["audioBox"], True)
-
- def test_audio_box_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["peripherals"]["audioBox"], False)
-
- def test_audio_loopback_dongle_present(self):
- l = sky.Labels()
- l._add_label("audio_loopback_dongle")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["peripherals"]["audioLoopbackDongle"], True)
-
- def test_audio_loopback_dongle_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["peripherals"]["audioLoopbackDongle"], False)
-
- def test_chameleon_present(self):
- l = sky.Labels()
- l._add_label("chameleon")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["peripherals"]["chameleon"], True)
-
- def test_chameleon_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["peripherals"]["chameleon"], False)
-
- def test_chameleon_type_present(self):
- l = sky.Labels()
- # the chameleon type field is named chameleon:something
- # NOT chameleon_type:something
- l._add_label("chameleon:hdmi")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["peripherals"]["chameleonType"],
- "CHAMELEON_TYPE_HDMI")
-
- def test_chameleon_type_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertIsNone(out["peripherals"]["chameleonType"])
-
- def test_conductive_present(self):
- l = sky.Labels()
- l._add_label("conductive:True")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["peripherals"]["conductive"], True)
-
- def test_conductive_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["peripherals"]["conductive"], False)
-
- def test_conductive_false(self):
- l = sky.Labels()
- l._add_label("conductive:False")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["peripherals"]["conductive"], False)
-
- def test_huddly_present(self):
- l = sky.Labels()
- l._add_label("huddly")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["peripherals"]["huddly"], True)
-
- def test_huddly_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["peripherals"]["huddly"], False)
-
- def test_mimo_present(self):
- l = sky.Labels()
- l._add_label("mimo")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["peripherals"]["mimo"], True)
-
- def test_mimo_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["peripherals"]["mimo"], False)
-
- def test_servo_present(self):
- l = sky.Labels()
- l._add_label("servo")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["peripherals"]["servo"], True)
-
- def test_servo_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["peripherals"]["servo"], False)
-
- def test_stylus_present(self):
- l = sky.Labels()
- l._add_label("stylus")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["peripherals"]["stylus"], True)
-
- def test_stylus_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["peripherals"]["stylus"], False)
-
- def test_wificell_present(self):
- l = sky.Labels()
- l._add_label("pool:bvt")
- l._add_label("wificell")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["peripherals"]["wificell"], True)
-
- def test_wificell_absent(self):
- l = sky.Labels()
- l._add_label("pool:bvt")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["peripherals"]["wificell"], False)
-
- def test_chaos_dut_present(self):
- l = sky.Labels()
- l._add_label("chaos_dut")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["testCoverageHints"]["chaosDut"], True)
-
- def test_chaos_dut_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["testCoverageHints"]["chaosDut"], False)
-
- def test_chaos_dut_present(self):
- l = sky.Labels()
- l._add_label("chaos_dut")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["testCoverageHints"]["chaosDut"], True)
-
- def test_chaos_dut_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["testCoverageHints"]["chaosDut"], False)
-
- def test_chromesign_present(self):
- l = sky.Labels()
- l._add_label("chromesign")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["testCoverageHints"]["chromesign"], True)
-
- def test_chromesign_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["testCoverageHints"]["chromesign"], False)
-
- def test_hangout_app_present(self):
- l = sky.Labels()
- l._add_label("hangout_app")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["testCoverageHints"]["hangoutApp"], True)
-
- def test_hangout_app_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["testCoverageHints"]["hangoutApp"], False)
-
- def test_meet_app_present(self):
- l = sky.Labels()
- l._add_label("meet_app")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["testCoverageHints"]["meetApp"], True)
-
- def test_meet_app_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["testCoverageHints"]["meetApp"], False)
-
- def test_recovery_test_present(self):
- l = sky.Labels()
- l._add_label("recovery_test")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["testCoverageHints"]["recoveryTest"], True)
-
- def test_recovery_test_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["testCoverageHints"]["recoveryTest"], False)
-
- def test_test_audio_jack_present(self):
- # NOTE: test_audio_jack maps to testAudiojack
- # instead of the expected *testAudioJack
- l = sky.Labels()
- l._add_label("test_audio_jack")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["testCoverageHints"]["testAudiojack"], True)
-
- def test_test_audio_jack_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["testCoverageHints"]["testAudiojack"], False)
-
- def test_test_hdmiaudio_present(self):
- l = sky.Labels()
- l._add_label("test_hdmiaudio")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["testCoverageHints"]["testHdmiaudio"], True)
-
- def test_test_hdmiaudio_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["testCoverageHints"]["testHdmiaudio"], False)
-
- def test_test_usbprinting_present(self):
- l = sky.Labels()
- l._add_label("test_usbprinting")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["testCoverageHints"]["testUsbprinting"], True)
-
- def test_test_usbprinting_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["testCoverageHints"]["testUsbprinting"], False)
-
- def test_usb_detect_present(self):
- l = sky.Labels()
- l._add_label("usb_detect")
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["testCoverageHints"]["usbDetect"], True)
-
- def test_usb_detect_absent(self):
- l = sky.Labels()
- out = sky.process_labels(l, platform=None)
- self.assertEqual(out["testCoverageHints"]["usbDetect"], False)
-
- def test_validate_fields_smoke_test(self):
- with self.assertRaises(ValueError):
- skylab_json_utils.validate_required_fields_for_skylab(47)
-
- def test_validate_fields_no_common(self):
- with self.assertRaises(ValueError):
- skylab_json_utils.validate_required_fields_for_skylab({})
-
- def test_validate_fields_no_labels(self):
- with self.assertRaises(ValueError):
- skylab_json_utils.validate_required_fields_for_skylab(
- {"common": None})
-
- def test_validate_fields_no_board(self):
- with self.assertRaises(
- skylab_json_utils.SkylabMissingLabelException) as ctx:
- skylab_json_utils.validate_required_fields_for_skylab(
- {"common": {
- "labels": []
- }})
- e = ctx.exception
- self.assertEqual(e.message, "board")
-
- def test_validate_fields_no_model(self):
- with self.assertRaises(
- skylab_json_utils.SkylabMissingLabelException) as ctx:
- skylab_json_utils.validate_required_fields_for_skylab(
- {"common": {
- "labels": {
- "board": True
- }
- }})
- e = ctx.exception
- self.assertEqual(e.message, "model")
-
- def test_validate_fields_no_sku(self):
- with self.assertRaises(
- skylab_json_utils.SkylabMissingLabelException) as ctx:
- skylab_json_utils.validate_required_fields_for_skylab(
- {"common": {
- "labels": {
- "board": True,
- "model": True
- }
- }})
- e = ctx.exception
- self.assertEqual(e.message, "sku")
-
- def test_validate_fields_no_brand(self):
- with self.assertRaises(
- skylab_json_utils.SkylabMissingLabelException) as ctx:
- skylab_json_utils.validate_required_fields_for_skylab({
- "common": {
- "labels": {
- "board": True,
- "model": True,
- "sku": True
- }
- }
- })
- e = ctx.exception
- self.assertEqual(e.message, "brand")
-
- def test_validate_fields_pass(self):
- item = {
- "common": {
- "labels": {
- "board": True,
- "model": True,
- "sku": True,
- "brand": True
- }
- }
- }
-        # should complete without throwing an exception
- skylab_json_utils.validate_required_fields_for_skylab(item)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/cli/skylab_migration.py b/cli/skylab_migration.py
deleted file mode 100644
index ddd63fb..0000000
--- a/cli/skylab_migration.py
+++ /dev/null
@@ -1,1082 +0,0 @@
-#!/usr/bin/env python2
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file
-from __future__ import unicode_literals
-from __future__ import print_function
-
-import collections
-import datetime
-import io
-import json
-import os
-import subprocess
-import tempfile
-import time
-import shutil
-import sys
-import types
-import itertools
-
-import common
-
-_THIS_FILE = os.path.abspath(__file__)
-_THIS_DIR = os.path.dirname(_THIS_FILE)
-
-_SKYLAB_EXE = 'skylab'
-
-__all__ = ['migrate', 'setup']
-
-_TEMPPATH = object()
-
-_FAILED_STEP_SENTINEL = object()
-
-_LITERAL_MAP = {
- 'True': True,
- 'False': False,
- 'None': None,
-}
-
-
-TEXT = (unicode, str)
-
-
-def find_atest_path():
- """Get the path to the 'atest' executable.
-
- @return : path to 'atest' executable
- """
- atest_exe = os.path.join(_THIS_DIR, 'atest')
- assert os.path.exists(atest_exe)
- return atest_exe
-
-
-_ATEST_EXE = find_atest_path()
-
-
-def strip_suffix(str, suffix):
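-    """Return 'str' with 'suffix' removed from the end, if it is present.
-
-    Illustrative behaviour (hypothetical hostnames, not from the original
-    source):
-      strip_suffix('host1-migrated-do-not-use', '-migrated-do-not-use') -> 'host1'
-      strip_suffix('host1', '-migrated-do-not-use')                     -> 'host1'
-    """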
- if str.endswith(suffix):
- return str[:-len(suffix)]
- else:
- return str
-
-
-def call_with_tempfile(cmd, lines):
- """Execute command requiring a temporary file and return a CommandOutput struct.
-
- @param cmd : the components of the argv to be executed.
- The magical value _TEMPPATH will be replaced with the path
- to the temporary file.
- @param lines : the lines of content to write to the temporary file
-
- @returns : CommandOutput struct containing output as list of lines
- and the exit status
- """
- if isinstance(cmd, (str, unicode)):
- raise TypeError('cmd cannot be str or unicode')
- assert not isinstance(lines, (str, unicode))
- with tempfile.NamedTemporaryFile() as fh:
- for line in lines:
- fh.write(line)
- if line.endswith('\n'):
- pass
- else:
- fh.write('\n')
- fh.flush()
- assert os.path.exists(fh.name)
- cmd = [(x if x is not _TEMPPATH else fh.name) for x in cmd]
- try:
- output = subprocess.check_output(cmd)
- if isinstance(output, (bytes, unicode)):
- output = output.splitlines()
- return CommandOutput(
- exit_code=0, output=[x.decode('utf-8') for x in output])
- except subprocess.CalledProcessError as e:
- return CommandOutput(
- exit_code=e.returncode,
- output=[x.decode('utf-8') for x in e.output.splitlines()])
-
-
-
-# accepts: string
-# returns: string but with exactly one trailing newline
-def _one_trailing_newline(s):
- s = s.rstrip("\n")
- return s + "\n"
-
-# accepts: shell command, rest of args
-# returns: exit_status, stdout, stderr
-def shell_capture_all(cmd, *rest):
- shellcmd = ("bash", "-c", cmd, "bash",) + rest
- pr = subprocess.Popen(
- shellcmd,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- )
- stdout, stderr = pr.communicate()
- return pr.returncode, stdout, stderr
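-
-# Illustrative call (hypothetical command, not from the original source).
-# Extra arguments are visible to the shell snippet as $1, $2, ... because
-# the literal "bash" is passed as $0:
-#   shell_capture_all('echo "$1"', 'host1')  ->  (0, 'host1\n', '')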
-
-
-# accepts: shell command, rest of args
-# returns: exit_status, stdout, stderr
-def shell_capture_all_no_stdin(cmd, *rest):
- with open(os.devnull) as null:
- shellcmd = ("bash", "-c", cmd, "bash",) + rest
- pr = subprocess.Popen(
- shellcmd,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- stdin=null,
- )
- stdout, stderr = pr.communicate()
- return pr.returncode, stdout, stderr
-
-
-# accepts: shell command, lines of temporary file
-# returns: exit_status, stdout, stderr
-def shell_capture_all_with_tempfile(cmd, lines):
- exit_status = stdout = stderr = None
-    if isinstance(lines, (bytes, unicode)):
- raise TypeError("lines must not be text-like (%s)" % type(lines))
- filename = None
- with tempfile.NamedTemporaryFile(delete=False) as fh:
- filename = fh.name
- for line in lines:
- fh.write(_one_trailing_newline(line))
- try:
- exit_status, stdout, stderr = shell_capture_all_no_stdin(cmd, filename)
- finally:
- os.unlink(filename)
- return exit_status, stdout, stderr
-
-
-CommandOutput = collections.namedtuple('CommandOutput', ['output', 'exit_code'])
-
-
-def _nontrivially_pairwise_disjoint(*sets):
- """If there are any items present in more than one set, then 'sets' is not pairwise disjoint.
-
- If there are exactly zero or one sets, then there are no pairs of sets
- and therefore the pairwise disjoint condition will always hold
- regardless of the set contents. Therefore, calling
- _nontrivially_pairwise_disjoint
- with fewer than 2 sets probably indicates a logic error and will result
- in an exception being thrown.
-
- Example: [{1}, {2}, set(), {3, 4, 5}, set()]
- CounterExample: [{1, 2}, {2, 3}]
-
- @param sets: a sequence of sets
- @return: whether the sets are pairwise disjoint
- """
- if len(sets) in (0, 1):
- raise ValueError(
- 'a collection of 0 or 1 sets is trivially pairwise disjoint.')
- combined = set()
- sum_len_set = 0
- for set_ in sets:
- combined.update(set_)
- sum_len_set += len(set_)
- assert len(combined) <= sum_len_set
- return len(combined) == sum_len_set
-
-
-MigrateDutCommandStatus = collections.namedtuple('MigrateDutCommandStatus', [
- 'success', 'failure', 'needs_add_to_skylab', 'needs_drone', 'needs_rename'
-])
-
-AddToSkylabInventoryAndDroneStatus = collections.namedtuple(
- 'AddToSkylabInventoryAndDroneStatus',
- ['complete', 'without_drone', 'not_started'])
-
-RenameCommandStatus = collections.namedtuple('RenameCommandStatus',
- ['renamed', 'not_renamed'])
-
-LockCommandStatus = collections.namedtuple('LockCommandStatus',
- ['locked', 'not_locked', 'tries'])
-
-MigrationPlan = collections.namedtuple('MigrationPlan', ['transfer', 'retain'])
-
-
-class MigrationException(Exception):
- """Raised when migration fails"""
- pass
-
-
-def stderr_log(*args, **kwargs):
- return print(*args, file=sys.stderr, **kwargs)
-
-
-def _humantime():
- return tuple(datetime.datetime.now().timetuple())[:6]
-
-
-def _migration_json_summary(failed_step=_FAILED_STEP_SENTINEL,
- plan=None,
- not_locked=None,
- migrate_status=None,
- unconditionally_migrate_status=None):
- assert isinstance(plan, MigrationPlan)
- assert not isinstance(not_locked, (str, unicode))
- assert isinstance(failed_step, (types.NoneType, unicode))
- assert isinstance(migrate_status, (types.NoneType, MigrateDutCommandStatus))
- assert isinstance(unconditionally_migrate_status, MigrateDutCommandStatus)
-
- def merge_attrs(fieldname, struct1, struct2=None):
- merged = set()
- if struct1:
- merged.update(getattr(struct1, fieldname))
- if struct2:
- merged.update(getattr(struct2, fieldname))
- return sorted(merged)
-
-
- out = {
- 'locked_success': (failed_step is None),
- 'failed_step': failed_step,
- 'plan': {
- 'transfer': merge_attrs('transfer', plan),
- 'retain': merge_attrs('retain', plan),
- },
- 'duts': {
- 'migrated':
- merge_attrs('success', migrate_status, unconditionally_migrate_status),
- 'not_locked':
- list(sorted(set(not_locked))),
- 'needs_add_to_skylab':
- merge_attrs('needs_add_to_skylab', migrate_status, unconditionally_migrate_status),
- 'needs_drone':
- merge_attrs('needs_drone', migrate_status, unconditionally_migrate_status),
- 'needs_rename':
- merge_attrs('needs_rename', migrate_status, unconditionally_migrate_status),
- }
- }
- return out
-
-
-class AtestCmd(object):
- """Helper functions for executing 'atest' commands"""
-
- @staticmethod
- def brief_info_cmd():
- """Command line for getting per-host info.
-
- @return : list of strings to be executed as external command
- """
- return [_ATEST_EXE, 'host', 'list', '--parse', '-M', _TEMPPATH]
-
- @staticmethod
- def brief_info(hostnames=None):
- """Run brief info command.
-
- @return : iterator of dictionaries describing each hostname
- """
- hostnames = hostnames or set()
- items = call_with_tempfile(AtestCmd.brief_info_cmd(), hostnames).output
- for item in AtestCmd.brief_info_filter(items):
- yield item
-
- @staticmethod
- def brief_info_filter(stream):
- """Filter lines of output from 'atest host list...'.
-
- @return : iterator of fields
- """
- for line in stream:
- line = line.rstrip()
- if line:
- fields = line.split('|')
- # if the line of output has exactly zero or one
- # |-delimited sections, then it is not a description
- # of a DUT. Silently discard such lines.
- if len(fields) in (0, 1):
- continue
- # trim labels entry if it exists
- if fields[-1].startswith('Labels='):
- fields.pop()
- d = {}
- for f in fields:
- k, _, v = f.partition('=')
- # if the value associated with a key is a Python literal
- # such as True, False, or None, replace it with the
- # corresponding Python value.
- # otherwise, use the original string.
- d[k] = _LITERAL_MAP.get(v, v)
- yield d
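-
-    # Illustrative parse (hypothetical 'atest host list --parse' line, not
-    # taken from real output):
-    #   'Host=host1|Status=Ready|Locked=False|Labels=board:eve'
-    #     -> {'Host': 'host1', 'Status': 'Ready', 'Locked': False}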
-
- @staticmethod
- def rename_cmd(for_migration=True):
- """Generate command line arguments for 'rename'.
-
- @return : command line arguments
- """
- name_flag = '--for-migration' if for_migration else '--for-rollback'
- return [
- _ATEST_EXE, 'host', 'rename', '--non-interactive', name_flag,
- '--parse', '-M', _TEMPPATH
- ]
-
- @staticmethod
- def rename(hostnames=None, for_migration=True):
- """Rename a list of hosts.
-
- @return : iterator of successfully renamed hosts
- """
- hostnames = hostnames or set()
-
- to_migrate_hostnames = set()
- already_migrated_hostnames = set()
-
- for hostname in hostnames:
- if hostname.endswith("-migrated-do-not-use"):
- already_migrated_hostnames.add(hostname)
- else:
- to_migrate_hostnames.add(hostname)
-
- stderr_log('begin rename', time.time(), _humantime())
- items = call_with_tempfile(
- AtestCmd.rename_cmd(for_migration=for_migration),
- lines=to_migrate_hostnames).output
-
- out = list(AtestCmd.rename_filter(items))
- out_seen = set(out)
-
- # out and already_migrated_hostnames should be disjoint
- # but if they aren't we still don't want to list the same
- # hostname twice
- for hostname in already_migrated_hostnames:
- if hostname not in out_seen:
- out.append(hostname)
-
- stderr_log('end rename', time.time(), _humantime())
- return out
-
- @staticmethod
- def rename_filter(stream):
- """Process each item of output from `atest host rename...`.
-
- @return : iterator of successfully renamed hosts
- """
- for item in stream:
- row = [x.strip() for x in item.strip().split()]
- if len(row) == 3:
- src, sep, dest = row
- # dest has the 'migrated-do-not-use' suffix
- # use src!
- if sep != 'to':
- continue
- yield src
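-
-    # Illustrative parse (hypothetical rename output line, not from real output):
-    #   'host1 to host1-migrated-do-not-use'  ->  yields 'host1'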
-
- @staticmethod
- def statjson_cmd(hostname=None):
- """Command line for generating json for hostname.
-
- @return : command line
- """
- return [_ATEST_EXE, 'host', 'statjson', '--', hostname]
-
- @staticmethod
- def statjson(hostname=None):
- """Run the command for getting the host json.
-
- @return : 'atest host statjson' output as parsed json.
- """
- cmd = AtestCmd.statjson_cmd(hostname=hostname)
- (out, err, exit_status) = capture_all(cmd)
- if exit_status == 0:
- try:
- return json.loads(out.decode('utf-8'))
- except ValueError:
- sys.stderr.write(out)
- sys.stderr.write("\n\n")
- return None
- else:
- if exit_status:
- if "Failed to stat:" in err:
- assert "Unknown host" in err
- return None
- else:
-                    assert False, "unexpected failure"
-
- @staticmethod
- def atest_lock_cmd(reason=None):
- """Generate command for 'atest host mod --lock'.
-
- @return : command line
- """
- return [
- _ATEST_EXE, 'host', 'mod', '--lock', '-r', reason, '-M', _TEMPPATH
- ]
-
- @staticmethod
- def atest_lock(reason=None, hostnames=None):
- """Try to lock hostnames via 'atest host mod --lock'.
-
- @return : Nothing
- """
- hostnames = hostnames or set()
- assert isinstance(reason, unicode)
- cmd = AtestCmd.atest_lock_cmd(reason=reason)
- # NOTE: attempting to lock a host can fail because the host
- # is already locked. Therefore, atest_lock always succeeds
- # regardless of the exit status of the command.
- call_with_tempfile(cmd, hostnames)
-
- @staticmethod
- def atest_lock_filter(stream):
- """Take lines from 'atest host mod --lock' and emit a stream of hostnames.
-
- The first line "Locked hosts:" is removed. We trim the whitespace of the
- other lines.
-
- Input:
- Locked Hosts:
- A
- B
- C
-
- Output:
- A
- B
- C
- """
- for x in stream:
- if x.lower().startswith('locked host'):
- continue
- else:
- yield x.strip()
-
- @staticmethod
- def atest_unlock_cmd():
- """Generate command for 'atest host mod --unlock'."""
- return [_ATEST_EXE, 'host', 'mod', '--unlock', '-M', _TEMPPATH]
-
- @staticmethod
- def atest_unlock(reason=None, hostnames=None):
- """Unlock hostnames via 'atest host mod --unlock'.
-
- @return : iterator of successfully unlocked hosts
- """
- hostnames = hostnames or set()
- cmd = AtestCmd.atest_unlock_cmd()
- items = call_with_tempfile(cmd, hostnames).output
- for item in AtestCmd.atest_unlock_filter(items):
- yield item
-
- @staticmethod
- def atest_unlock_filter(stream):
- """Take lines from 'atest host mod --unlock' and emit a stream of hostnames.
-
- The first line "Unlocked hosts:" is removed. We trim the whitespace of
- the other lines.
-
- Input:
- Unlocked Hosts:
- A
- B
- C
-
- Output:
- A
- B
- C
- """
- for x in stream:
- if x.lower().startswith('unlocked host'):
- continue
- else:
- yield x.strip()
-
- @staticmethod
- def atest_get_migration_plan_cmd(ratio):
- """Generate command for 'atest host get_migration_plan --mlist ...'"""
- return [
- _ATEST_EXE, 'host', 'get_migration_plan', '--ratio',
- unicode(ratio), '--mlist', _TEMPPATH
- ]
-
- @staticmethod
-    def atest_get_migration_plan(ratio, hostnames=None):
- # optimizations in case the ratio is 1 or 0
- hostnames = hostnames or set()
- if ratio == 0:
- return {
- 'transfer': [],
- 'retain': hostnames,
- }
- if ratio == 1:
- return {
- 'transfer': hostnames,
- 'retain': [],
- }
- cmd = AtestCmd.atest_get_migration_plan_cmd(ratio)
- output = call_with_tempfile(cmd, hostnames).output
- out = json.loads(''.join(output))
- return out
-
-
-def trywith(exn_type, f, *args, **kwargs):
- out = None
- exn = None
- try:
- out = f(*args, **kwargs)
- except exn_type as e:
- exn = e
- return (out, exn)
-
-
-
-def backtick(*args, **kwargs):
- output = None
- exit_status = None
- out, exn = trywith(subprocess.CalledProcessError, subprocess.check_output, *args, **kwargs)
- if exn is None:
- output = out
- exit_status = 0
- else:
- output = exn.output
- exit_status = exn.returncode
- return (output, exit_status)
-
-
-def capture_all(*args, **kwargs):
- proc = subprocess.Popen(
- *args,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- **kwargs
- )
- out, err = proc.communicate()
- return (out, err, proc.returncode)
-
-
-
-# accepts: iterable of hostnames
-# returns: {
-# good: hostnames in autotest
-# bad: hostnames not in autotest
-# }
-def autotest_status(hostnames):
- os.environ["ATEST"] = _ATEST_EXE
- status, out, err = shell_capture_all_with_tempfile('"${ATEST}" host list --hostnames-only --mlist "$1"', hostnames)
- good = []
- bad = []
- # process the bad lines
-    for errline in err.splitlines():
-        # skip preamble
-        if errline.startswith("Unknown host"):
-            continue
-        bad.append(errline.strip())
-    # process the good lines
-    for goodline in out.splitlines():
-        good.append(goodline.strip())
- return {
- "good": good,
- "bad": bad
- }
-
-# accepts: iterable of hostnames
-# returns: {
-# good: hostnames in skylab
-# bad: hostnames not in skylab
-# }
-def skylab_status(hostnames):
- os.environ["SKYLAB"] = _SKYLAB_EXE
- good = []
- bad = []
- for hostname in hostnames:
- os.environ["HOSTNAME"] = hostname
- status, out, err = shell_capture_all_no_stdin('"${SKYLAB}" dut-info "${HOSTNAME}"')
- # TODO(gregorynisbet): make error checking more robust here to see why exactly
- # we couldn't get info on the DUT.
- if status == 0:
- good.append(hostname)
- else:
- bad.append(hostname)
- return {
- "good": good,
- "bad": bad,
- }
-
-
-
-# accepts: iterable of hostnames
-# returns: {
-# good: hostnames with no issues
-# not_renamed: hostnames in skylab but not renamed in autotest
-# not_in_skylab: hostnames that are not in skylab
-# not_renamed_not_in_skylab: hostnames that aren't renamed or in skylab
-# }
-def hostname_migrated_status(hostnames):
- migrated_map = {}
- for hostname in hostnames:
- migrated_map[hostname + "-migrated-do-not-use"] = hostname
-
- atest_out = autotest_status(hostnames)
- atest_out_good = set(atest_out["good"])
- atest_out_bad = set(atest_out["bad"])
-
- skylab_out = skylab_status(hostnames)
- skylab_out_good = set(skylab_out["good"])
- skylab_out_bad = set(skylab_out["bad"])
-
- atest_renamed_out = autotest_status(list(migrated_map))
- atest_renamed_out_good = set(atest_renamed_out["good"])
- atest_renamed_out_bad = set(atest_renamed_out["bad"])
-
- good = []
- not_renamed = []
- not_in_skylab = []
- not_renamed_not_in_skylab = []
-
- for hostname in hostnames:
- # hostname flags is a string listing the undesirable properties
- # associated with that particular hostname
- # A -- host has bad autotest status, either not renamed or old hostname present
- # S -- not migrated to skylab
- hostname_flags = set()
- if hostname in atest_out_good:
- hostname_flags.add("A")
- if hostname in atest_renamed_out_bad:
- hostname_flags.add("A")
- if hostname in skylab_out_bad:
- hostname_flags.add("S")
-
- if hostname_flags == set():
- good.append(hostname)
- elif hostname_flags == {"A"}:
- not_renamed.append(hostname)
- elif hostname_flags == {"S"}:
- not_in_skylab.append(hostname)
- elif hostname_flags == {"A", "S"}:
- not_renamed_not_in_skylab.append(hostname)
- else:
- assert False, ("impossible, unexpected set %s" % hostname_flags)
-
- return {
- "good": good,
- "not_renamed": not_renamed,
- "not_in_skylab": not_in_skylab,
- "not_renamed_not_in_skylab": not_renamed_not_in_skylab,
- }
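-
-# Illustrative classification (hypothetical host): a hostname that is still
-# listed under its old name in autotest and is unknown to skylab collects the
-# flags {"A", "S"} above and is reported as "not_renamed_not_in_skylab".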
-
-
-class SkylabCmd(object):
- """Helper functions for executing Skylab commands"""
-
- ADD_MANY_DUTS_CMD = (_SKYLAB_EXE, 'quick-add-duts')
-
- @staticmethod
- def add_one_dut_cmd():
- """Create the skylab command line invocation for adding a single DUT."""
- return [
- _SKYLAB_EXE,
- 'add-dut',
- '-skip-image-download',
- '-skip-install-firmware',
- '-skip-install-os',
- '-specs-file',
- _TEMPPATH,
- ]
-
- @staticmethod
- def add_one_dut(add_dut_content):
- """Add one dut to skylab."""
- stderr_log('begin add_one_dut', time.time(), _humantime())
- cmd = SkylabCmd.add_one_dut_cmd()
- out = call_with_tempfile(cmd, [json.dumps(add_dut_content)])
- stderr_log('end add_one_dut', time.time(), _humantime())
- return out
-
- @staticmethod
- def assign_one_dut_cmd(hostname=None):
- """Command line for assigning a single DUT to a randomly chosen drone."""
- # by default, skylab assign-dut will pick a random drone
- return [_SKYLAB_EXE, 'assign-dut', '--', hostname]
-
- @staticmethod
-    def add_many_duts(dut_contents):
-        """Add multiple DUTs to skylab at once.
-
-        @param dut_contents: a sequence of JSON-like objects describing DUTs as
-            used by `skylab add-dut` and `skylab quick-add-dut`
-
-        @returns : nothing
-        """
-        stderr_log('begin add_many_duts', time.time(), _humantime())
-        for dut_content in dut_contents:
-            stderr_log("add many DUTs: ", str(dut_content)[:80] + "...")
- # TODO(gregorynisbet) -- how fine-grained does the error reporting need
- # to be? is it possible for some duts to be
- # successfully migrated and others not?
- # The action performed by `skylab quick-add-duts`
- # is idempotent, so trying multiple times is not
- # necessarily a problem.
- td = tempfile.mkdtemp()
- try:
- paths = []
- for i in range(len(dut_contents)):
- path_ = os.path.join(td, str(i))
- with open(path_, 'w') as fh:
- json.dump(dut_contents[i], fh)
- paths.append(path_)
- cmd = list(SkylabCmd.ADD_MANY_DUTS_CMD) + paths
- print("log command")
- stderr_log(cmd)
- print("capture_all")
- # ignore cases where the hostname doesn't exist
- (out, err, exit_status) = capture_all(cmd)
- if exit_status != 0:
- if "Failed to stat:" in err:
- assert "Unknown host" in err
- # then do nothing
-
- # shutil.rmtree(td, ignore_errors=True)
- finally:
- stderr_log('end add_many_duts', time.time(), _humantime())
-
-
-class Migration(object):
-
- @staticmethod
- def migration_plan(ratio, hostnames=None):
- hostnames = hostnames or set()
- plan = AtestCmd.atest_get_migration_plan(
- ratio=ratio, hostnames=hostnames)
- return MigrationPlan(transfer=plan['transfer'], retain=plan['retain'])
-
- @staticmethod
- def lock(hostnames=None, reason=None, retries=3):
- """Lock a list of hostnames with retries.
- """
- hostnames = hostnames or set()
- assert isinstance(reason, unicode)
- to_lock = set(hostnames)
- for _ in range(retries):
- AtestCmd.atest_lock(hostnames=to_lock.copy(), reason=reason)
-
- @staticmethod
- def ensure_lock(hostnames=None):
- """Without changing the state of a DUT, determine which are locked.
-
- @return : LockCommandStatus
- """
- hostnames = hostnames or set()
- dut_infos = AtestCmd.brief_info(hostnames=hostnames)
- all_hosts = set(hostnames)
- confirmed_locked = set()
- for dut_info in dut_infos:
- locked = dut_info['Locked']
- assert locked in (True, False)
- if locked:
- confirmed_locked.add(dut_info['Host'])
- return LockCommandStatus(
- locked=confirmed_locked,
- not_locked=(all_hosts - confirmed_locked),
- tries=None,
- )
-
- @staticmethod
- def rename(hostnames=None, for_migration=True, retries=1):
- """Rename a list of hosts with retry.
-
- @return : {"renamed": renamed hosts, "not-renamed": not renamed
- hosts}
- """
- hostnames = hostnames or set()
- all_hosts = set(hostnames)
- needs_rename = all_hosts.copy()
- for _ in range(retries):
- for successfully_renamed in AtestCmd.rename(
- hostnames=needs_rename.copy(), for_migration=for_migration):
- needs_rename.discard(successfully_renamed)
- out = RenameCommandStatus(
- renamed=(all_hosts - needs_rename),
- not_renamed=needs_rename,
- )
- return out
-
- @staticmethod
- def add_to_skylab_inventory_and_drone(use_quick_add, hostnames=None, rename_retries=3):
- """@returns : AddToSkylabInventoryAndDroneStatus"""
- hostnames = hostnames or set()
- assert not isinstance(hostnames, (unicode, bytes))
- stderr_log('begin add hostnames to inventory', time.time(),
- _humantime())
- all_hosts = set(hostnames)
- moved = set()
- with_drone = set()
-
- if use_quick_add:
- stderr_log("quick add path", time.time(), _humantime())
- dut_contents = []
- good_hostnames = []
- for hostname in hostnames:
-                out_json = AtestCmd.statjson(hostname=hostname)
-                if out_json is None:
-                    continue
-                dut_contents.append(out_json)
-                good_hostnames.append(hostname)
-            # SkylabCmd.add_many_duts does not check whether the action was
-            # successful, so we use hostname_migrated_status to verify that the
-            # DUTs we were supposed to migrate actually were.
- SkylabCmd.add_many_duts(dut_contents=dut_contents)
-
- # strip the migrated suffix when checking the status of each of the hostnames
- truncated_hostnames = [strip_suffix(hostname, "-migrated-do-not-use") for hostname in hostnames]
- status_out = hostname_migrated_status(truncated_hostnames)
-
- # anything in the good state or missing rename is fine
- complete = status_out["good"] + status_out["not_renamed"]
- # anything where the status indicates that the entity is not in skylab yet is not fine
- not_started = status_out["not_in_skylab"] + status_out["not_renamed_not_in_skylab"]
-
- return AddToSkylabInventoryAndDroneStatus(
- complete=complete,
- without_drone=set(),
- not_started=not_started,
- )
-
- else:
- stderr_log("slow add path", time.time(), _humantime())
- for hostname in hostnames:
- if hostname not in moved:
- skylab_dut_descr = AtestCmd.statjson(hostname=hostname)
- stderr_log("processing hostname", hostname)
- status = SkylabCmd.add_one_dut(add_dut_content=skylab_dut_descr)
- if status.exit_code != 0:
- continue
- moved.add(hostname)
- with_drone.add(hostname)
-
- out = AddToSkylabInventoryAndDroneStatus(
- complete=with_drone,
- without_drone=(moved - with_drone),
- not_started=((all_hosts - moved) - with_drone),
- )
- stderr_log('end add hostnames to inventory', time.time(), _humantime())
- return out
-
- @staticmethod
- def migrate_known_good_duts_until_max_duration_sync(use_quick_add,
- hostnames=None,
- max_duration=60 * 60,
- min_ready_intervals=10,
- interval_len=0):
- """Take a list of DUTs and attempt to migrate them when they aren't busy.
-
- @param hostnames : list of hostnames
- @param max_duration : when to stop trying to safely migrate duts
- @param min_ready_intervals : the minimum number of intervals that a DUT
- must have a good status
-        @param interval_len : the length of each interval, in seconds
- @param use_quick_add : whether to use skylab quick-add-duts.
-
- @returns : {"success": successfuly migrated DUTS, "failure":
- non-migrated DUTS}
- """
- hostnames = hostnames or set()
- assert interval_len is not None
- stderr_log('begin migrating only ready DUTs', time.time(), _humantime())
- start = time.time()
- stop = start + max_duration
- good_intervals = collections.Counter()
- need_to_move = set(hostnames)
- successfully_moved = set()
- needs_add_to_skylab = set()
- needs_drone = set()
- needs_rename = set()
- while time.time() < stop:
- if not need_to_move:
- break
- ready_to_move = set()
- # determine which duts have been in a good state for min_ready_intervals
- for record in AtestCmd.brief_info(hostnames=need_to_move.copy()):
- hostname = record['Host']
- if record['Status'] not in {'Running', 'Provisioning'}:
- good_intervals[hostname] += 1
- else:
- del good_intervals[hostname]
- if good_intervals[hostname] >= min_ready_intervals:
- ready_to_move.add(hostname)
- need_to_move.discard(hostname)
- # move the ready to move duts now
- # any dut that is declared ready to move at this point will definitely
- # reach a terminal state
- skylab_summary = Migration.add_to_skylab_inventory_and_drone(
- hostnames=ready_to_move, use_quick_add=use_quick_add)
- needs_add_to_skylab.update(skylab_summary.not_started)
- needs_drone.update(skylab_summary.without_drone)
- # rename the autotest entry all at once
- rename_summary = Migration.rename(
- hostnames=skylab_summary.complete, for_migration=True)
- needs_rename.update(rename_summary.not_renamed)
- successfully_moved.update(rename_summary.renamed)
- time.sleep(interval_len)
- out = MigrateDutCommandStatus(
- success=successfully_moved,
- failure=(need_to_move | needs_add_to_skylab | needs_drone
- | needs_rename),
- needs_add_to_skylab=needs_add_to_skylab,
- needs_drone=needs_drone,
- needs_rename=needs_rename,
- )
- stderr_log('end migrating only ready DUTs', time.time(), _humantime())
- return out
-
- @staticmethod
- def migrate_duts_unconditionally(hostnames, use_quick_add):
- """regardless of the DUTs' status, forcibly migrate all the DUTs to skylab.
-
- @returns: MigrateDutCommandStatus
- """
- hostnames = hostnames or set()
- assert not isinstance(hostnames, (unicode, bytes))
- stderr_log('begin unconditional migration', time.time(), _humantime())
- successfully_moved = set()
- needs_add_to_skylab = set()
- needs_drone = set()
- needs_rename = set()
- skylab_summary = Migration.add_to_skylab_inventory_and_drone(
- hostnames=hostnames, use_quick_add=use_quick_add)
- needs_add_to_skylab.update(skylab_summary.not_started)
- needs_drone.update(skylab_summary.without_drone)
- rename_summary = Migration.rename(
- hostnames=skylab_summary.complete, for_migration=True)
- successfully_moved.update(rename_summary.renamed)
-        needs_rename.update(rename_summary.not_renamed)
- out = MigrateDutCommandStatus(
- success=successfully_moved,
- failure=(needs_drone | needs_rename | needs_add_to_skylab),
- needs_add_to_skylab=needs_add_to_skylab,
- needs_drone=needs_drone,
- needs_rename=needs_rename,
- )
- stderr_log('end unconditional migration', time.time(), _humantime())
- return out
-
- @staticmethod
- def migrate(hostnames=None,
- ratio=1,
- reason=None,
- max_duration=None,
- interval_len=None,
- min_ready_intervals=10,
- immediately=None,
- use_quick_add=False):
- """Migrate duts from autotest to skylab.
-
- @param ratio : ratio of DUTs in hostnames to migrate.
- @param hostnames : hostnames to migrate
-        @param reason : the reason to record when locking DUTs for the migration
- @param interval_len : length of time between checks for DUT readiness
- @param max_duration : the grace period to allow DUTs to finish their
- tasks
-        @param min_ready_intervals : minimum number of consecutive intervals a
-            DUT must be in a good state before it is migrated
-        @param immediately : if true, skip the graceful waiting period and
-            migrate every DUT unconditionally
-        @param use_quick_add : whether to use `skylab quick-add-duts`
-
-        @return : a JSON-serializable summary of the migration
- """
- hostnames = hostnames or set()
- assert isinstance(reason, (unicode, bytes))
- assert interval_len is not None
- assert max_duration is not None
- assert immediately is not None
- reason = reason if isinstance(reason,
- unicode) else reason.decode('utf-8')
- # log the parameters of the migration
- stderr_log('begin migrate', time.time(), _humantime())
- stderr_log('number of hostnames', len(hostnames), time.time(), _humantime())
- stderr_log('ratio', ratio, time.time(), _humantime())
- stderr_log('max_duration', max_duration, time.time(), _humantime())
- stderr_log('atest', _ATEST_EXE, time.time(), _humantime())
- stderr_log('skylab', _SKYLAB_EXE, time.time(), _humantime())
- stderr_log('minimum number of intervals', min_ready_intervals, time.time(), _humantime())
- stderr_log('immediately', immediately, time.time(), _humantime())
- stderr_log('use_quick_add', use_quick_add, time.time(), _humantime())
-
- all_hosts = tuple(hostnames)
- plan = Migration.migration_plan(ratio=ratio, hostnames=all_hosts)
- Migration.lock(hostnames=plan.transfer, reason=reason)
- failed_step = _FAILED_STEP_SENTINEL
- ensure_lock_status = Migration.ensure_lock(hostnames=plan.transfer)
- if ensure_lock_status.not_locked:
- failed_step = 'lock'
- to_migrate = plan.transfer
- migrate_status = None
- if not immediately:
- migrate_status = \
- Migration.migrate_known_good_duts_until_max_duration_sync(
- hostnames=to_migrate,
- max_duration=max_duration,
- min_ready_intervals=min_ready_intervals,
- interval_len=interval_len,
- use_quick_add=use_quick_add)
- to_migrate = migrate_status.failure
- unconditionally_migrate_status = Migration.migrate_duts_unconditionally(
- use_quick_add=use_quick_add,
- hostnames=to_migrate,
- )
-        if failed_step is _FAILED_STEP_SENTINEL:
-            failed_step = None
- out = _migration_json_summary(
- failed_step=failed_step,
- plan=plan,
- not_locked=ensure_lock_status.not_locked,
- migrate_status=migrate_status,
- unconditionally_migrate_status=unconditionally_migrate_status,
- )
- stderr_log('end migrate', time.time(), _humantime())
- return out
-
-
-
-# accepts: iterable
-# returns: item or None, ok (true if item is real, false otherwise)
-def next_safe(it):
- it = iter(it)
- try:
- return next(it), True
- except StopIteration:
- return None, False
-
-
-# accepts: n (stride length), it (iterable)
-# returns: iterator of arrays of n items each
-def natatime(n, it):
- it = iter(it)
- while True:
- out = []
- for i in range(n):
- item, ok = next_safe(it)
- if ok:
- out.append(item)
- else:
- break
- if len(out):
- yield out
- else:
- return
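-
-# Illustrative batching (hypothetical values, not from the original source):
-#   list(natatime(2, ['a', 'b', 'c', 'd', 'e']))
-#     -> [['a', 'b'], ['c', 'd'], ['e']]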
-
-
-
-
-def migrate(batch_size=None, hostnames=None, **kwargs):
- if batch_size is None:
- return Migration.migrate(hostnames=hostnames, **kwargs)
- if batch_size is not None:
- it = natatime(n=batch_size, it=hostnames)
- out = []
- for batch in it:
-            res = Migration.migrate(hostnames=batch, **kwargs)
-            json.dump(res, sys.stderr, indent=4)
- out.append(res)
- return out
-
-
-def setup(atest_exe=None, skylab_exe=None):
- """Configure the module-scoped path to atest and skylab executables."""
- if atest_exe is not None:
- _ATEST_EXE = atest_exe
- if skylab_exe is not None:
- _SKYLAB_EXE = skylab_exe
diff --git a/cli/skylab_migration2.py b/cli/skylab_migration2.py
deleted file mode 100644
index eb927c6..0000000
--- a/cli/skylab_migration2.py
+++ /dev/null
@@ -1,524 +0,0 @@
-#!/usr/bin/env python2
-
-
-# user facing commands:
-#
-# -- write_statjson_hostnames
-# -- do_quick_add
-
-
-
-# pretty_dump_board:
-
-# Given the name of a board, dump it to a directory with
-# the following structure:
-
-# data.dir
-# - hostname1
-# - hostname2
-# - hostname3
-
-# NEXT, report all the ways in which hostnames of that board are not valid
-# jsonified protobufs for adding a new DUT.
-#
-# this involves shelling out to skylab validate-new-dut-json
-
-
-
-# adding a dut that's already present
-# $ skylab quick-add-dut /tmp/json
-# Deployment ID: d604b46b-87c7-4a4d-8b8a-ed97565c4797
-# Status: DUT_DEPLOYMENT_STATUS_FAILED
-# Inventory change URL:
-# Deploy task URL:
-# Message: failed to add DUT(s) to fleet: add dut to fleet: inventory store commit: nothing to commit
-
-
-# adding a dut successfully
-# $ skylab quick-add-dut /tmp/json
-# Deployment ID: 8e27b3ff-39ad-4b92-9bea-3f804a9d7bf9
-# Status: DUT_DEPLOYMENT_STATUS_FAILED
-# Inventory change URL: https://chrome-internal-review.googlesource.com/c/chromeos/infra_internal/skylab_inventory/+/1940714
-# Deploy task URL:
-# Message: missing deploy task ID in deploy request entry
-
-
-
-# $ atest host rename --for-migration --non-interactive chromeos2-row1-rack7-host1
-# Successfully renamed:
-# chromeos2-row1-rack7-host1 to chromeos2-row1-rack7-host1-migrated-do-not-use
-
-
-# unsuccessful lock
-
-# $ atest host mod --lock -r 'migration to skylab' chromeos2-row1-rack7-host1-migrated-do-not-use
-# Operation modify_host failed:
-# ValidationError: {'locked': u'Host chromeos2-row1-rack7-host1-migrated-do-not-use already locked by pprabhu on 2019-08-29 16:48:51.'}
-# 1
-
-
-# successful lock
-
-
-
-from __future__ import print_function
-from __future__ import unicode_literals
-import os
-import sys
-import subprocess
-import pipes
-import os.path
-import warnings
-import json
-import tempfile
-import shutil
-
-TEXT = (type(b""), type(u""))
-NONETYPE = type(None)
-
-
-def flush_sync(fh):
- fh.flush()
- os.fsync(fh)
- return
-
-
-# accepts: shell command, rest of args
-# returns: exit_status, stdout, stderr
-def shell_capture_all(cmd, *rest):
- shellcmd = ("bash", "-c", cmd, "bash",) + rest
- pr = subprocess.Popen(
- shellcmd,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- )
- stdout, stderr = pr.communicate()
- return pr.returncode, stdout, stderr
-
-
-# accepts: name of board
-# returns: list of hostnames, error message (None if no error)
-def get_all_hosts_for_board(board):
- # extract just the hostnames for all the hosts in the board
- cmd = "set -o pipefail; ( atest host list --label=%%%LABEL%%% | awk '{print $1}' )"
- cmd = cmd.replace("%%%LABEL%%%", pipes.quote("board:%s" % board))
- returncode, out, err = shell_capture_all(cmd)
- if returncode != 0:
- return None, "get_all_hosts_for_board: " + err
- hostnames = [x.strip() for x in out.split()]
- if hostnames and hostnames[0] == "Host":
- hostnames.pop(0)
- return hostnames, None
-
-
-# make a directory and test if we can write to it as defensively as possible
-# free vars: %%%DEST%%%
-MAKE_DIR_CMD = r"""
-mkdir -p %%%DEST%%%
-cd %%%DEST%%% && touch ./writeable && rm ./writeable
-"""
-
-
-# free vars: %%%HOSTNAME%%% %%%DEST%%%
-STATJSON_CMD = r"""
-atest host statjson %%%HOSTNAME%%% 1>%%%DEST%%%/%%%HOSTNAME%%%
-"""
-
-# accepts: path to directory
-# returns: error message (None if successfully made dir)
-def mkdirp(dirpath):
- cmd = MAKE_DIR_CMD.replace('%%%DEST%%%', pipes.quote(dirpath))
- returncode, out, err = shell_capture_all(cmd)
- if returncode == 0:
- return None
- else:
- return err
-
-
-# accepts: list of hostnames, output directory (path)
-# returns: successful hosts, failed hosts, error message (None if no error)
-def write_statjson_hostnames(hostnames, outdir):
- successful_hosts = []
- failed_hosts = []
- err = mkdirp(outdir)
- if err is not None:
- return None, None, err
- for hostname in hostnames:
- cmd = STATJSON_CMD
- cmd = cmd.replace('%%%HOSTNAME%%%', pipes.quote(hostname))
- cmd = cmd.replace('%%%DEST%%%', pipes.quote(outdir))
- returncode, out, err = shell_capture_all(cmd)
- if returncode == 0:
- successful_hosts.append(hostname)
- else:
- failed_hosts.append(hostname)
- return successful_hosts, failed_hosts, None
-
-
-# accepts: board name, output directory
-# returns: successful hosts, failed hosts, error message
-def write_statjson_board(board, outdir):
- hostnames, err = get_all_hosts_for_board(board)
- if err is not None:
- return None, None, err
- successful_hosts, failed_hosts, err = write_statjson_hostnames(hostnames, outdir)
- return successful_hosts, failed_hosts, err
-
-
-# free vars: %%%PATH%%%
-VALIDATE_CMD = r"""
-skylab validate-new-dut-json %%%PATH%%%
-"""
-
-
-# accepts: directory with hostname files
-# returns: dictionary of the form below, error message (or None if no error)
-#
-# {
-# hostname: error message (or None if no error)
-# }
-def validate_output(hostname_dir):
- try:
- paths = os.listdir(hostname_dir)
- except OSError:
- return None, ("bad directory: %s" % hostname_dir)
-
- # defensively populate result dictionary with errors
- # so that we don't erroneously conclude that an unvisited
- # hostname was validated
- result = {}
- for path in paths:
- result[path] = "DID NOT PROCESS"
-
- for path in paths:
- cmd = VALIDATE_CMD
- cmd = cmd.replace('%%%PATH%%%', pipes.quote(os.path.join(hostname_dir, path)))
- returncode, out, err = shell_capture_all(cmd)
- if returncode != 0:
- result[path] = "failed to validate (errcode %s): %s" % (returncode, err)
- elif os.path.exists(os.path.join(hostname_dir, path)):
- result[path] = None
- else:
- warnings.warn("nonexistent path %s" % path)
- result[path] = "file does not exist"
-
- return result, None
-
-
-
-# accepts: name of board, output directory
-# returns: dictionary of form below, error message (or None if no error)
-#
-# {
-# hostname: error-message or NOFILE if no file or None if no error
-# }
-def process_board(board, output_dir):
- queried_hosts, unqueried_hosts, err = write_statjson_board(board, output_dir)
- if err is not None:
- return None, err
- validate_result, err = validate_output(output_dir)
- if err is not None:
- return None, err
- result = {}
-
- for hostname in queried_hosts:
- try:
- result[hostname] = validate_result[hostname]
- except KeyError:
- warnings.warn("hostname (%s) not present in validate_result")
-
- for hostname in unqueried_hosts:
- result["hostname"] = "NOFILE"
-
- return result, None
-
-
-# accepts: name of board, output directory
-# returns: number of bad hosts
-# emits: prints error message for every bad host
-def pretty_process_board(board, output_dir):
- result, err = process_board(board, output_dir)
- if err is not None:
- print(err)
- return 1
- bad_results = {}
- for k in result:
- if result[k] is not None:
- bad_results[k] = result[k]
- # no bad results --> nothing printed
-    for k in bad_results:
- print(k, result[k])
- return len(bad_results)
-
-
-# accepts: path to output directory
-# returns: combined json object, error message (None if no error)
-# NOTE: the directory not existing is a fatal error
-# processing a file that is invalid json AFTER the output directory
-# has been validated produces a warning. the invalid json situation
-# should be impossible, but also isn't enough to prevent assemble_output_dir
-# from doing something reasonable.
-def assemble_output_dir(output_dir):
- _, err = validate_output(output_dir)
- if err is not None:
- return None, err
- out = []
- items = None
- try:
- items = os.listdir(output_dir)
- except OSError:
- return None, ("directory %s does not exist or is not readable" % output_dir)
- for item in items:
- obj = None
- try:
- with open(os.path.join(output_dir, item), "r") as fh:
- try:
- obj = json.load(fh)
- except ValueError:
- warnings.warn("file %s does not contain valid JSON" % item)
- continue
- except IOError:
- warnings.warn("file %s somehow doesn't exist" % item)
- continue
- out.append(obj)
- return out, None
-
-
-# accepts: single json dictionary
-# returns: error message (None if valid)
-def validate_single_dut_json(obj):
- with tempfile.NamedTemporaryFile(delete=True) as fh:
- json.dump(obj, fh)
- flush_sync(fh)
- cmd = VALIDATE_CMD
- cmd = cmd.replace('%%%PATH%%%', pipes.quote(fh.name))
- returncode, out, err = shell_capture_all(cmd)
- if returncode == 0:
- return None
- else:
- return err
-
-
-# accepts: json obj
-# returns: hostname, error message (None if no error)
-def get_hostname_from_dut_json(obj):
- try:
- common = obj["common"]
- except KeyError:
- return None, "dut has no common element"
- if not isinstance(common, dict):
- return None, ("common block must be dict not %s" % type(common))
- try:
- return common["hostname"], None
- except KeyError:
- return None, "common block has no hostname element"
-
-
-# accepts: path to file
-# returns: hostname mapping, error message
-# hostname mapping has the following form
-#
-# {
-# hostname -> new_dut_info_json
-# }
-#
-# malformed entries are not included in the map
-def load_hostname_map_file(filepath):
- obj = None
- try:
- with open(filepath, "r") as fh:
- try:
- obj = json.load(fh)
- except ValueError:
- return None, ("file does not contain JSON %s" % filepath)
- except IOError:
- return None, ("cannot load hostname map from nonexistent file %s" % filepath)
-
- out = {}
-
- # return a singleton map if the toplevel entry is a dictionary
- if isinstance(obj, dict):
- err = validate_single_dut_json(obj)
- if err is not None:
- return None, err
- hostname, err = get_hostname_from_dut_json(obj)
- if err is not None:
- return None, err
- return {hostname: obj}, None
-
- # validate all elements if the toplevel entry is a list
- elif isinstance(obj, list):
- for subobj in obj:
- err = validate_single_dut_json(subobj)
- if err is not None:
- warnings.warn(err)
- continue
- hostname, err = get_hostname_from_dut_json(subobj)
- if err is not None:
- warnings.warn(err)
- continue
- # warn if we get a duplicate, but don't halt execution
- if hostname in out:
- warnings.warn(("duplicate hostname %s" % hostname))
- out[hostname] = subobj
-
- if len(out) == 0:
- return out, "out cannot be empty"
-
- return out, None
-
-
-# accepts: path to directory
-# returns: hostnaming mapping, error message
-# hostname mapping has the following form
-#
-# {
-# hostname -> new_dut_info_json
-# }
-#
-# malformed entries are not included in the map
-def load_hostname_map(dirpath):
- items = None
- try:
- items = os.listdir(dirpath)
- except OSError:
- return None, ("cannot load from nonexistent directory %s" % dirpath)
- if len(items) == 0:
- return None, ("nothing in directory %s" % dirpath)
- out = {}
- for item in items:
- hostname_map, err = load_hostname_map_file(os.path.join(dirpath, item))
- if err is not None:
- warnings.warn(err)
- continue
-
- for hostname in hostname_map:
- if hostname in out:
- warnings.warn("load_hostname_map: duplicate hostname %s" % hostname)
- continue
- out[hostname] = hostname_map[hostname]
-
- return out, None
-
-
-# free vars: %%%DIR%%%
-QUICK_ADD_DUTS_CMD = r"""
-skylab quick-add-duts %%%DIR%%%/*
-"""
-
-
-# accepts: list of hostnames, hostname_dirpath
-# returns: error message (None if successful)
-# NOTE: the quick-add-duts API is atomic
-# emits: missing hostnames when there are missing hostnames
-def do_quick_add_duts(hostnames, dirpath):
- # validation
- if isinstance(hostnames, TEXT):
- return "hostnames cannot be %s" % type(hostnames)
- # defensively prevent hostnames from being modified
- hostnames = tuple(hostnames)
- for hostname in hostnames:
- if hostname.startswith("."):
- return "hostname cannot start with '.' (%s)" % hostname
- if not hostname:
- return "hostname cannot be falsey (%s)" % hostname
-
-
- hostnames_map, err = load_hostname_map(dirpath)
- if err is not None:
- return err
-
- # check that every hostname is in the map before trying
- missing_hostnames = set([])
- for hostname in hostnames:
- if hostname not in hostnames_map:
- missing_hostnames.add(hostname)
-
- if missing_hostnames:
- for hostname in sorted(missing_hostnames):
- print(("MISSING %s" % hostname))
- return "%s missing hostnames" % len(missing_hostnames)
-
- try:
- # construct temporary directory of dut files.
- tdir = tempfile.mkdtemp()
- for hostname in hostnames:
- newpath = os.path.join(tdir, hostname)
- with open(newpath, "w") as fh:
- json.dump(obj=hostnames_map[hostname], fp=fh)
-
- # paranoia, check number of files.
- num_files = len(os.listdir(tdir))
- if num_files != len(hostnames):
- return "internal error. hostnames: %s, files: %s, tdir: %s" % (len(hostnames), num_files, tdir)
-
- # validate directory contents before proceeding
- _, err = validate_output(tdir)
- if err is not None:
- return err
-
- # execute the command, transfer file contents to skylab
- # NOTE: this step is atomic
- # note... skylab quick-add-duts will claim there's no deploy task ID
- # when it is successful
- # we need to parse the output to tell what happened
- #
- # right now, I use the presence of the magic strings 'nothing to commit'
- # and 'missing deploy task ID in request entry' to diagnose what happened
- # but this is fragile.
- cmd = QUICK_ADD_DUTS_CMD
- cmd = cmd.replace('%%%DIR%%%', pipes.quote(tdir))
- returncode, out, err = shell_capture_all(cmd)
- if " nothing to commit" in out:
- warnings.warn("nothing to commit ... no change made to inventory")
- return None
- elif "missing deploy task ID in deploy request entry" in out:
- print("SUCCESS!")
- print(out)
- return None
- else:
- return ("%s\n%s" % (out, err))
- finally:
- print(("path to tempdir: %s" % tdir))
-
-
-# free vars: %%%MLIST%%%
-ATEST_LOCK_RENAME_CMD = r"""
-
-tdir="$(mktemp -d)"
-test -d "$tdir" || exit 10
-
-# produce the unmangled hostnames
-cat -- %%%MLIST%%% | sed -e 's/-migrated-do-not-use$//' > "$tdir"/unmangled
-cat -- "$tdir"/unmangled | sed -e 's/$/-migrated-do-not-use//' > "$tdir"/mangled
-
-# just attempt to lock, discard results
-# first process unmangled names
-atest host mod --lock -r 'migration to skylab' --mlist "$tdir"/unmangled 2>&1
-# then process mangled names
-atest host mod --lock -r 'migration to skylab' --mlist "$tdir"/mangled 2>&1
-
-# next attempt to rename only the unmangled names
-atest host rename --for-migration --non-interactive --mlist "$tdir"/unmangled 2>&1
-
-"""
-
-
-# accepts: hostnames
-# returns: error message (None if no error)
-# emits: output
-def atest_lock_rename(hostnames):
- if isinstance(hostnames, TEXT):
- return "hostnames cannot be text"
-
- with tempfile.NamedTemporaryFile(delete=True) as fh:
- for hostname in hostnames:
- fh.write(hostname.strip())
- fh.write("\n")
- flush_sync(fh)
- cmd = ATEST_LOCK_RENAME_CMD
- cmd = cmd.replace('%%%MLIST%%%', pipes.quote(fh.name))
- returncode, out, err = shell_capture_all(cmd)
-
- print(out)
- return None
diff --git a/cli/skylab_migration_unittest.py b/cli/skylab_migration_unittest.py
deleted file mode 100644
index d49a827..0000000
--- a/cli/skylab_migration_unittest.py
+++ /dev/null
@@ -1,485 +0,0 @@
-#!/usr/bin/python2
-# pylint: disable-msg=C0111
-#
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file
-"""Test for skylab migration unittest."""
-
-from __future__ import unicode_literals
-from __future__ import print_function
-
-import copy
-import json
-import os.path
-import subprocess
-import tempfile
-import unittest
-import shutil
-
-import mock
-
-import common
-from autotest_lib.cli import skylab_migration
-
-
-class ExecuteWithTempfileUnittest(unittest.TestCase):
-
- def test_call_with_tempfile(self):
- with mock.patch('subprocess.check_output') as check_output:
- check_output.return_value = b'\n'.join([b'x', b'y', b'z'])
- commandOutput = skylab_migration.call_with_tempfile([], [])
- self.assertEqual(commandOutput.output, ['x', 'y', 'z'])
-
- def test_call_with_tempfile_real(self):
- commandOutput = skylab_migration.call_with_tempfile(
- ['/bin/cat', skylab_migration._TEMPPATH], ['a', 'b', 'c'])
- self.assertEqual(commandOutput.output, ['a', 'b', 'c'])
-
-
-class MigrationUnittest(unittest.TestCase):
-
- def setUp(self):
- super(MigrationUnittest, self).setUp()
- self._tempdir = tempfile.mkdtemp()
-
- def do_nothing(*args, **kwargs):
- pass
-
- self.__patches = {
- 'call_with_tempfile':
- mock.patch.object(
- skylab_migration, 'call_with_tempfile', new=None),
- 'popen':
- mock.patch('subprocess.Popen', new=None),
- 'check_call':
- mock.patch.object(subprocess, 'check_call', new=None),
- 'call':
- mock.patch.object(subprocess, 'call', new=None),
- 'check_output':
- mock.patch.object(subprocess, 'check_output', new=None),
- 'mkdtemp':
- mock.patch.object(tempfile, 'mkdtemp', new=None),
- 'mkstemp':
- mock.patch.object(tempfile, 'mkstemp', new=None),
- 'NamedTemporaryFile':
- mock.patch('tempfile.NamedTemporaryFile', new=None),
- 'stderr_log':
- mock.patch.object(
- skylab_migration, 'stderr_log', new=do_nothing)
- }
- for x in self.__patches.values():
- x.start()
-
- def tearDown(self):
- for x in self.__patches.values():
- x.stop()
- shutil.rmtree(self._tempdir, ignore_errors=True)
- super(MigrationUnittest, self).tearDown()
-
- def test_find_atest(self):
- atest_exe = skylab_migration.find_atest_path()
- self.assertTrue(os.path.exists(atest_exe))
-
- def test_brief_info_cmd(self):
- return self.assertEqual(skylab_migration.AtestCmd.brief_info_cmd()[:-1],
- [skylab_migration._ATEST_EXE] +
- 'host list --parse -M'.split())
-
- def test_brief_info(self):
- with mock.patch.object(skylab_migration, 'call_with_tempfile') as call_:
- call_.return_value = skylab_migration.CommandOutput(
- exit_code=0,
- output=[
- 'key1=a|Labels=x', 'key1=b|Labels=y', 'key1=c|Labels=z'
- ])
- items = list(
- skylab_migration.AtestCmd.brief_info(hostnames=['a', 'b', 'c']))
- self.assertEqual(items, [
- {
- 'key1': 'a'
- },
- {
- 'key1': 'b'
- },
- {
- 'key1': 'c'
- },
- ])
-
- def test_rename_cmd_for_migration(self):
- cmd = skylab_migration.AtestCmd.rename_cmd(for_migration=True)
- self.assertEqual(cmd, [
- skylab_migration._ATEST_EXE,
- 'host',
- 'rename',
- '--non-interactive',
- '--for-migration',
- '--parse',
- '-M',
- skylab_migration._TEMPPATH,
- ])
-
- def test_rename_cmd_for_rollback(self):
- cmd = skylab_migration.AtestCmd.rename_cmd(for_migration=False)
- self.assertEqual(cmd, [
- skylab_migration._ATEST_EXE,
- 'host',
- 'rename',
- '--non-interactive',
- '--for-rollback',
- '--parse',
- '-M',
- skylab_migration._TEMPPATH,
- ])
-
- def test_rename_filter(self):
- expected = ['10', '20']
- actual = list(
- skylab_migration.AtestCmd.rename_filter(['10 to 1', '20 to 2']))
- self.assertEqual(expected, actual)
-
- def test_rename(self):
- with mock.patch.object(skylab_migration, 'call_with_tempfile') as call_:
- output = skylab_migration.CommandOutput(
- exit_code=0,
- output=[
- 'a to a.suffix', 'b to b.suffix', 'c to c.suffix',
- 'd to d.suffix'
- ])
- expected = ['a', 'b', 'c', 'd']
- call_.return_value = output
- actual = list(skylab_migration.AtestCmd.rename(hostnames=[]))
- self.assertEqual(expected, actual)
-
- def test_statjson_cmd(self):
- self.assertEqual(
- skylab_migration.AtestCmd.statjson_cmd(hostname='H'),
- [skylab_migration._ATEST_EXE, 'host', 'statjson', '--', 'H'])
-
- def test_atest_lock_cmd(self):
- self.assertEqual(
- skylab_migration.AtestCmd.atest_lock_cmd(reason='R'), [
- skylab_migration._ATEST_EXE, 'host', 'mod', '--lock', '-r', 'R',
- '-M', skylab_migration._TEMPPATH
- ])
-
- def test_atest_lock(self):
- # just check that traversing the body of atest_lock doesn't throw an exception
- with mock.patch.object(skylab_migration, 'call_with_tempfile') as call_:
-            call_.return_value = skylab_migration.CommandOutput(
-                exit_code=0, output=['a', 'b'])
-            skylab_migration.AtestCmd.atest_lock(reason='R', hostnames=['a', 'b'])
-
- def test_atest_unlock_cmd(self):
- self.assertEqual(skylab_migration.AtestCmd.atest_unlock_cmd(), [
- skylab_migration._ATEST_EXE, 'host', 'mod', '--unlock', '-M',
- skylab_migration._TEMPPATH
- ])
-
- def test_atest_unlock(self):
- with mock.patch.object(skylab_migration.AtestCmd,
- 'atest_unlock') as atest_unlock:
- atest_unlock.return_value = ['a', 'b']
- expected = ['a', 'b']
- actual = list(
- skylab_migration.AtestCmd.atest_unlock(hostnames=['a', 'b']))
- self.assertEqual(expected, actual)
-
- def test_add_one_dut_cmd(self):
- expected = [
- skylab_migration._SKYLAB_EXE, 'add-dut', '-skip-image-download',
- '-skip-install-firmware', '-skip-install-os', '-specs-file',
- skylab_migration._TEMPPATH
- ]
- actual = skylab_migration.SkylabCmd.add_one_dut_cmd()
- self.assertEqual(expected, actual)
-
- def test_atest_get_migration_plan_cmd(self):
- expected = [
- skylab_migration._ATEST_EXE, 'host', 'get_migration_plan',
- '--ratio', '0.1', '--mlist', skylab_migration._TEMPPATH
- ]
- actual = skylab_migration.AtestCmd.atest_get_migration_plan_cmd(
- ratio=0.1)
- self.assertEqual(expected, actual)
-
- def test_atest_get_migration_plan(self):
- with mock.patch.object(skylab_migration,
- 'call_with_tempfile') as call_with_tempfile:
- call_with_tempfile.return_value = skylab_migration.CommandOutput(
- exit_code=0,
- output=[json.dumps({
- 'transfer': [],
- 'retain': []
- })])
- out = skylab_migration.AtestCmd.atest_get_migration_plan(
- ratio=0.4, hostnames=[])
- self.assertEqual(out['transfer'], [])
- self.assertEqual(out['retain'], [])
-
- def test_lock_smoke_test(self):
- # just make sure Migration.lock doesn't throw an exception
- with mock.patch.object(skylab_migration, 'call_with_tempfile') as call_:
- call_.return_value = skylab_migration.CommandOutput(
- exit_code=0, output=['a', 'b'])
- skylab_migration.Migration.lock(
- hostnames=[], reason='reason', retries=3)
-
- def test_lock_single_host(self):
- pass
- # def atest_lock(hostnames=[], **kwargs):
- # """successfully lock every hostname"""
- # for item in hostnames:
- # yield item
-
- # with mock.patch.object(skylab_migration, 'AtestCmd') as atest_cmd:
- # atest_cmd.atest_lock = atest_lock
- # summary = skylab_migration.Migration.lock(
- # hostnames=['HHH'], reason=None, retries=1)
- # self.assertEqual(summary.locked, {'HHH'})
- # self.assertEqual(summary.not_locked, set())
- # self.assertEqual(list(summary.tries), ['HHH'])
-
- def test_lock_one_good_one_bad(self):
- # TODO(gregorynisbet): effectively just a smoke test
- def atest_lock(hostnames=[], **kwargs):
-            return None
-
- with mock.patch.object(skylab_migration, 'AtestCmd') as atest_cmd:
- atest_cmd.atest_lock = atest_lock
-
- def test_ensure_lock_smoke_test(self):
-
- def brief_info(hostnames=[], **kwargs):
- if False:
- yield 42
-
- with mock.patch.object(skylab_migration, 'AtestCmd') as atest_cmd:
- atest_cmd.brief_info = brief_info
- summary = skylab_migration.Migration.ensure_lock(hostnames=[])
- self.assertEqual(summary.locked, set())
- self.assertEqual(summary.not_locked, set())
-
- def test_ensure_lock_one_good_one_bad(self):
-
- def brief_info(**kwargs):
- yield {'Host': 'a', 'Locked': True}
- yield {'Host': 'b', 'Locked': False}
-
- with mock.patch.object(skylab_migration, 'AtestCmd') as atest_cmd:
- atest_cmd.brief_info = brief_info
- summary = skylab_migration.Migration.ensure_lock(
- hostnames=['a', 'b'])
- self.assertEqual(summary.locked, {'a'})
- self.assertEqual(summary.not_locked, {'b'})
-
- def test_rename_smoke_test(self):
-
- def atest_cmd_rename(**kwargs):
- if False:
- yield 42
-
- with mock.patch.object(skylab_migration, 'AtestCmd') as atest_cmd:
- atest_cmd.rename = atest_cmd_rename
- summary = skylab_migration.Migration.rename(hostnames=[])
- self.assertEqual(summary.renamed, set())
- self.assertEqual(summary.not_renamed, set())
-
- def test_rename_one_good_one_bad(self):
-
- def atest_cmd_rename(**kwargs):
- yield 'GOOD'
-
- with mock.patch.object(skylab_migration, 'AtestCmd') as atest_cmd:
- atest_cmd.rename = atest_cmd_rename
- summary = skylab_migration.Migration.rename(
- hostnames=['GOOD', 'BAD'])
- self.assertEqual(summary.renamed, set(['GOOD']))
- self.assertEqual(summary.not_renamed, set(['BAD']))
-
- def test_add_to_skylab_inventory_and_drone_smoke_test(self):
- summary = skylab_migration.Migration.add_to_skylab_inventory_and_drone(
- hostnames=[],
- use_quick_add=False)
- self.assertEqual(summary.complete, set())
- self.assertEqual(summary.without_drone, set())
- self.assertEqual(summary.not_started, set())
-
- def test_add_to_skylab_inventory_and_drone_one_of_each(self):
-
- @staticmethod
- def atest_statjson(hostname=None):
- return hostname
-
- @staticmethod
- def add_one_dut(add_dut_content=None):
- if add_dut_content in ('GOOD', 'MEDIUM'):
- return skylab_migration.CommandOutput(output=[], exit_code=0)
- else:
- return skylab_migration.CommandOutput(output=[], exit_code=1)
-
- @staticmethod
- def assign_one_dut(hostname=None):
- if hostname == 'GOOD':
- return skylab_migration.CommandOutput(output=[], exit_code=0)
- else:
- return skylab_migration.CommandOutput(output=[], exit_code=1)
-
- with mock.patch.object(
- skylab_migration.AtestCmd, 'statjson', new=atest_statjson):
- with mock.patch.object(
- skylab_migration.SkylabCmd, 'add_one_dut', new=add_one_dut):
- summary = skylab_migration.Migration.add_to_skylab_inventory_and_drone(
- use_quick_add=False,
- hostnames=['GOOD', 'MEDIUM', 'BAD'])
- self.assertEqual(summary.complete, {'GOOD', 'MEDIUM'})
- self.assertEqual(summary.not_started, {'BAD'})
-
- def test_migrate_known_good_duts_until_max_duration_sync_smoke_test(self):
-
- def brief_info(**kwargs):
- if False:
- yield 42
-
- def rename(**kwargs):
- if False:
- yield 42
-
- with mock.patch.object(skylab_migration, 'AtestCmd') as atest_cmd:
- atest_cmd.brief_info = brief_info
- atest_cmd.rename = rename
- summary = skylab_migration.Migration.migrate_known_good_duts_until_max_duration_sync(
- use_quick_add=False,
- hostnames=[])
- self.assertEqual(summary.success, set())
- self.assertEqual(summary.failure, set())
-
- def test_migrate_known_good_duts_until_max_duration_one_good_one_bad(self):
-
- def brief_info(**kwargs):
- return [
- {
- 'Host': 'GOOD',
- 'Status': 'Ready'
- },
- {
- 'Host': 'BAD',
- 'Status': 'Ready'
- },
- ]
-
- inventory_return = skylab_migration.AddToSkylabInventoryAndDroneStatus(
- complete=['GOOD', 'BAD'],
- not_started=[],
- without_drone=[],
- )
-
- def atest_cmd_rename(hostname=None, **kwargs):
- yield 'GOOD'
-
- with mock.patch.object(skylab_migration, 'AtestCmd') as atest_cmd:
- atest_cmd.brief_info = brief_info
- atest_cmd.rename = atest_cmd_rename
- with mock.patch.object(skylab_migration, 'SkylabCmd') as skylab_cmd:
- with mock.patch.object(skylab_migration.Migration,
- 'add_to_skylab_inventory_and_drone'
- ) as add_to_skylab_obj:
- add_to_skylab_obj.return_value = inventory_return
- summary = skylab_migration.Migration.migrate_known_good_duts_until_max_duration_sync(
- use_quick_add=False,
- hostnames=['GOOD', 'BAD'])
- self.assertEqual(summary.success, set(['GOOD']))
- self.assertEqual(summary.failure, set(['BAD']))
- self.assertEqual(summary.needs_add_to_skylab, set())
- self.assertEqual(summary.needs_drone, set())
- self.assertEqual(summary.needs_rename, set(['BAD']))
- add_to_skylab_obj.assert_called()
-
- def test_migrate_duts_unconditionally_smoke_test(self):
-
- def brief_info(**kwargs):
- if False:
- yield 42
-
- def rename(**kwargs):
- if False:
- yield 42
-
- with mock.patch.object(skylab_migration, 'AtestCmd') as atest_cmd:
- atest_cmd.brief_info = brief_info
- atest_cmd.rename = rename
- summary = skylab_migration.Migration.migrate_duts_unconditionally(
- use_quick_add=False,
- hostnames=[])
- self.assertEqual(summary.success, set())
- self.assertEqual(summary.failure, set())
-
- def test_migrate_duts_unconditionally_one_good_one_bad(self):
-
- def brief_info(**kwargs):
- return [
- {
- 'Host': 'GOOD',
- 'Status': 'Ready'
- },
- {
- 'Host': 'BAD',
- 'Status': 'Ready'
- },
- ]
-
- inventory_retval = skylab_migration.AddToSkylabInventoryAndDroneStatus(
- complete=['GOOD', 'BAD'],
- not_started=[],
- without_drone=[],
- )
-
- def atest_cmd_rename(hostname=None, **kwargs):
- yield 'GOOD'
-
- with mock.patch.object(skylab_migration, 'AtestCmd') as atest_cmd:
- atest_cmd.brief_info = brief_info
- atest_cmd.rename = atest_cmd_rename
- with mock.patch.object(skylab_migration, 'SkylabCmd') as skylab_cmd:
- with mock.patch.object(skylab_migration.Migration,
- 'add_to_skylab_inventory_and_drone'
- ) as add_to_skylab_obj:
- add_to_skylab_obj.return_value = inventory_retval
- summary = skylab_migration.Migration.migrate_duts_unconditionally(
- use_quick_add=False,
- hostnames=['GOOD', 'BAD'])
- self.assertEqual(summary.success, set(['GOOD']))
- self.assertEqual(summary.failure, set(['BAD']))
-
- @mock.patch.object(skylab_migration.Migration, 'migration_plan')
- @mock.patch.object(skylab_migration.Migration,
- 'migrate_known_good_duts_until_max_duration_sync')
- @mock.patch.object(skylab_migration.Migration,
- 'migrate_duts_unconditionally')
- @mock.patch.object(skylab_migration.Migration, 'ensure_lock')
- @mock.patch.object(skylab_migration.Migration, 'lock')
- def test_migrate_smoke_test(self, lock, ensure_lock,
- migrate_duts_unconditionally, known_good,
- migration_plan):
- lock.return_value = skylab_migration.LockCommandStatus(
- locked=[], not_locked=[], tries=None)
- ensure_lock.return_value = skylab_migration.LockCommandStatus(
- locked=[], not_locked=[], tries=None)
- known_good.return_value = migrate_duts_unconditionally.return_value = skylab_migration.MigrateDutCommandStatus(
- success=[],
- failure=[],
- needs_add_to_skylab=[],
- needs_drone=[],
- needs_rename=[])
- migration_plan.return_value = skylab_migration.MigrationPlan(
- transfer=[], retain=[])
- skylab_migration.Migration.migrate(
- hostnames=[],
- reason='test',
- interval_len=0,
- max_duration=10,
- immediately=True)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/cli/skylab_rollback.py b/cli/skylab_rollback.py
deleted file mode 100644
index da6289a..0000000
--- a/cli/skylab_rollback.py
+++ /dev/null
@@ -1,60 +0,0 @@
-import os
-import subprocess
-
-
-BATCH_SIZE = 50
-
-
-# delete all the duts and un-migrate the corresponding entries in
-# autotest. We achieve un-migrated by renaming a hostname with the suffix
-# -migrated-do-not-use so it does not have the suffix.
-# Then, for good measure, we unlock everything.
-ROLLBACK_CMD = r"""
-bug="${ROLLBACK_BUG:-b/7}"
-
-skylab remove-duts -delete -bug b/7 "$@"
-
-declare -a mangled
-
-for item in "$@"; do
- mangled+=("$item"-migrated-do-not-use)
-done
-
-atest host rename --for-rollback --non-interactive "${mangled[@]}"
-
-atest host mod --unlock --no-confirmation "$@"
-"""
-
-
-def _batches(xs, batch_size=BATCH_SIZE):
- """yield batches of a given size"""
- out = []
- for x in xs:
- out.append(x)
- if len(out) >= batch_size:
- yield out
- out = []
- if out:
- yield out
-
-
-def rollback(hosts, bug=None, dry_run=False):
- """rollback a collection of hosts"""
- assert isinstance(bug, (int, str, float, long, type(None)))
- old_environ = os.environ.copy()
- out = []
- if bug:
- os.environ["ROLLBACK_BUG"] = str(bug)
- try:
- for slice_ in _batches(hosts):
- cmd = ["bash", "-c", ROLLBACK_CMD, "bash"] + slice_
- if dry_run:
- out.append(cmd)
- else:
- subprocess.call(cmd)
- finally:
- os.environ.clear()
- os.environ.update(old_environ)
- if not dry_run:
- out = None
- return out
diff --git a/cli/skylab_rollback_unittest.py b/cli/skylab_rollback_unittest.py
deleted file mode 100755
index e990a42..0000000
--- a/cli/skylab_rollback_unittest.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/python2
-# pylint: disable-msg=C0111
-#
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file
-"""Test for skylab json utils."""
-
-from __future__ import unicode_literals
-
-import unittest
-
-import common
-from autotest_lib.cli import skylab_rollback
-
-
-class skylab_rollback_unittest(unittest.TestCase):
- def test_batches(self):
- xs = list(range(40))
- expected = [list(range(20)), list(range(20, 40))]
- actual = list(skylab_rollback._batches(xs, batch_size=20))
- self.assertEqual(expected, actual)
-
- def test_rollback(self):
- actual = skylab_rollback.rollback(["a", "b"], dry_run=True)
- expected = [["bash", "-c", skylab_rollback.ROLLBACK_CMD, "bash", "a", "b"]]
- self.assertEqual(expected, actual)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/client/common_lib/cros/cr50_utils.py b/client/common_lib/cros/cr50_utils.py
index 1cc95c3..deb90fd 100644
--- a/client/common_lib/cros/cr50_utils.py
+++ b/client/common_lib/cros/cr50_utils.py
@@ -74,6 +74,7 @@
UPDATE_TIMEOUT = 60
UPDATE_OK = 1
+MP_BID_FLAGS = 0x7f80
ERASED_BID_INT = 0xffffffff
ERASED_BID_STR = hex(ERASED_BID_INT)
# With an erased bid, the flags and board id will both be erased
@@ -572,6 +573,31 @@
return board_id, board_id_inv, flags
+def GetChipBIDFromImageBID(image_bid, brand):
+ """Calculate a chip bid that will work with the image bid.
+
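+ Args:
+ image_bid: board id info of the image, as accepted by GetBoardIdInfoTuple.
+ brand: symbolic brand code used when the image does not restrict the board id.
+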
+ Returns:
+ A tuple of integers (bid type, ~bid type, bid flags)
+ """
+ image_bid_tuple = GetBoardIdInfoTuple(image_bid)
+ # GetBoardIdInfoTuple returns None if the image isn't board id locked.
+ # Generate a tuple of all 0s that the rest of the function can use.
+ if not image_bid_tuple:
+ image_bid_tuple = (0, 0, 0)
+
+ image_bid, image_mask, image_flags = image_bid_tuple
+ if image_mask:
+ new_brand = GetSymbolicBoardId(image_bid)
+ else:
+ new_brand = brand
+ new_flags = image_flags or MP_BID_FLAGS
+ bid_type = GetIntBoardId(new_brand)
+ # If the board id type is erased, type_inv should also be unset.
+ if bid_type == ERASED_BID_INT:
+ return (ERASED_BID_INT, ERASED_BID_INT, new_flags)
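+ # Otherwise the board id inverse is the 32-bit complement of the board id type.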
+ return bid_type, 0xffffffff & ~bid_type, new_flags
+
+
def CheckChipBoardId(client, board_id, flags, board_id_inv=None):
"""Compare the given board_id and flags to the running board_id and flags
diff --git a/client/common_lib/cros/enrollment.py b/client/common_lib/cros/enrollment.py
index 0e3719a..18c1df5 100644
--- a/client/common_lib/cros/enrollment.py
+++ b/client/common_lib/cros/enrollment.py
@@ -16,24 +16,6 @@
oobe.ExecuteJavaScript(cmd)
-def SwitchToRemora(browser):
- """Switch to Remora enrollment.
-
- @param browser: telemetry browser object.
- """
- logging.info('Attempting to switch to Meet enrollment')
- try:
- chrome.Chrome.wait_for_browser_restart(
- lambda: _ExecuteOobeCmd(browser,
- 'Oobe.remoraRequisitionForTesting();'),
- browser)
- except utils.TimeoutError:
- logging.warning('Timeout waiting for browser to restart after switching enrollment modes')
- logging.warning('DUT may have started in Meet enrollment -- attempting to continue')
-
- utils.poll_for_condition(lambda: browser.oobe_exists, timeout=30)
-
-
def RemoraEnrollment(browser, user_id, password):
"""Enterprise login for a Remora device.
diff --git a/client/common_lib/hosts/repair.py b/client/common_lib/hosts/repair.py
index 809badd..bcc58fe 100644
--- a/client/common_lib/hosts/repair.py
+++ b/client/common_lib/hosts/repair.py
@@ -359,6 +359,7 @@
self._verify_dependencies(host, silent)
logging.info('Verifying this condition: %s', self.description)
try:
+ logging.debug('Start verify task: %s.', type(self).__name__)
self.verify(host)
self._record_good(host, silent)
except Exception as e:
@@ -366,6 +367,9 @@
self._result = e
self._record_fail(host, silent, e)
raise
+ finally:
+ logging.debug('Finished verify task: %s.', type(self).__name__)
+
self._result = True
def verify(self, host):
@@ -860,6 +864,7 @@
attempted = False
for ra in self._repair_actions:
try:
+ logging.debug('Start repair task: %s.', type(ra).__name__)
ra._repair_host(host, silent)
except Exception as e:
# all logging and exception handling was done at
@@ -867,6 +872,7 @@
pass
finally:
self._send_action_metrics(host, ra)
+ logging.debug('Finished repair task: %s.', type(ra).__name__)
if ra.status not in ('skipped', 'blocked'):
attempted = True
diff --git a/client/cros/audio/audio_helper.py b/client/cros/audio/audio_helper.py
index 5404804..2ea7a7e 100644
--- a/client/cros/audio/audio_helper.py
+++ b/client/cros/audio/audio_helper.py
@@ -431,8 +431,6 @@
cras_utils.set_system_volume(_DEFAULT_PLAYBACK_VOLUME)
cras_utils.set_selected_output_node_volume(_DEFAULT_PLAYBACK_VOLUME)
- cras_utils.set_capture_gain(_DEFAULT_CAPTURE_GAIN)
-
cras_utils.set_system_mute(False)
cras_utils.set_capture_mute(False)
diff --git a/client/cros/audio/cras_configs.py b/client/cros/audio/cras_configs.py
deleted file mode 100644
index 7be0595..0000000
--- a/client/cros/audio/cras_configs.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""This module provides cras audio configs."""
-
-INTERNAL_MIC_GAIN_100DB = {
- 'chell': 500,
- 'auron_yuna': -1000,
- 'kevin': 0,
-}
-
-def get_proper_internal_mic_gain(board):
- """Return a proper internal mic gain.
-
- @param board: Board name.
-
- @returns: A number in 100 dB. E.g., 1000 is 10dB. This is in the same unit
- as cras_utils set_capture_gain. Returns None if there is no such
- entry.
- """
- return INTERNAL_MIC_GAIN_100DB.get(board, None)
diff --git a/client/cros/audio/cras_utils.py b/client/cros/audio/cras_utils.py
index 5f2df6a..e0c151d 100644
--- a/client/cros/audio/cras_utils.py
+++ b/client/cros/audio/cras_utils.py
@@ -188,15 +188,6 @@
get_cras_control_interface().SetOutputNodeVolume(node_id, volume)
-def set_capture_gain(gain):
- """Set the system capture gain.
-
- @param gain the capture gain in db*100 (100 = 1dB)
-
- """
- get_cras_control_interface().SetInputGain(gain)
-
-
def get_cras_control_interface(private=False):
"""Gets Cras DBus control interface.
diff --git a/client/cros/chameleon/audio_widget.py b/client/cros/chameleon/audio_widget.py
index 0010331..96c0ab3 100644
--- a/client/cros/chameleon/audio_widget.py
+++ b/client/cros/chameleon/audio_widget.py
@@ -12,7 +12,6 @@
from autotest_lib.client.cros.audio import audio_data
from autotest_lib.client.cros.audio import audio_test_data
-from autotest_lib.client.cros.audio import cras_configs
from autotest_lib.client.cros.audio import sox_utils
from autotest_lib.client.cros.chameleon import audio_test_utils
from autotest_lib.client.cros.chameleon import chameleon_audio_ids as ids
@@ -781,52 +780,6 @@
rate=48000)
-class CrosIntMicInputWidgetHandler(CrosInputWidgetHandler):
- """
- This class abstracts a Cros device audio input widget handler on int mic.
-
- """
- def __init__(self, audio_facade, system_facade):
- """Initializes a CrosWidgetHandler.
-
- @param audio_facade: An AudioFacadeRemoteAdapter to access Cros device
- audio functionality.
- @param system_facade: A SystemFacadeRemoteAdapter to access Cros device
- audio functionality.
-
- """
- super(CrosIntMicInputWidgetHandler, self).__init__(audio_facade)
- self._system_facade = system_facade
-
-
- def set_proper_gain(self):
- """Sets a proper gain.
-
- On some boards, the default gain is too high. It relies on automatic
- gain control in application level to adjust the gain. Since there is no
- automatic gain control in the test, we set a proper gain before
- recording.
-
- """
- board = self._system_facade.get_current_board()
- proper_gain = cras_configs.get_proper_internal_mic_gain(board)
-
- if proper_gain is None:
- logging.debug('No proper gain for %s', board)
- return
-
- logging.debug('Set gain to %f dB on internal mic for %s ',
- proper_gain / 100, board)
- self._audio_facade.set_input_gain(proper_gain)
-
-
- def start_recording(self, node_type=None, block_size=None):
- """Starts recording audio with proper gain."""
- self.set_proper_gain()
- super(CrosIntMicInputWidgetHandler, self).start_recording(
- node_type, block_size)
-
-
class CrosHotwordingWidgetHandler(CrosInputWidgetHandler):
"""
This class abstracts a Cros device audio input widget handler on hotwording.
diff --git a/client/cros/chameleon/chameleon_audio_helper.py b/client/cros/chameleon/chameleon_audio_helper.py
index c4cfd73..73cabf5 100644
--- a/client/cros/chameleon/chameleon_audio_helper.py
+++ b/client/cros/chameleon/chameleon_audio_helper.py
@@ -301,9 +301,6 @@
elif is_usb:
return audio_widget.CrosUSBInputWidgetHandler(
self._audio_facade)
- elif is_internal_mic:
- return audio_widget.CrosIntMicInputWidgetHandler(
- self._audio_facade, self._system_facade)
elif is_hotwording:
return audio_widget.CrosHotwordingWidgetHandler(
self._audio_facade, self._system_facade)
diff --git a/client/cros/enterprise/enterprise_au_context.py b/client/cros/enterprise/enterprise_au_context.py
deleted file mode 100644
index 8a67ba6..0000000
--- a/client/cros/enterprise/enterprise_au_context.py
+++ /dev/null
@@ -1,125 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import re
-
-from autotest_lib.client.bin import utils
-from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.update_engine import nano_omaha_devserver
-from autotest_lib.client.cros.update_engine import update_engine_event as uee
-from autotest_lib.client.cros.update_engine import update_engine_util
-
-_MIN_BUILD = '1.1.1'
-_MAX_BUILD = '999999.9.9'
-
-class NanoOmahaEnterpriseAUContext(object):
- """
- Contains methods required for Enterprise AU tests using Nano Omaha.
-
- """
- def __init__(self, image_url, image_size, sha256, to_build=_MAX_BUILD,
- from_build=_MIN_BUILD, is_rollback=False, is_critical=False):
- """
- Start a Nano Omaha instance and intialize variables.
-
- @param image_url: Url of update image.
- @param image_size: Size of the update.
- @param sha256: Sha256 hash of the update.
- @param to_build: String of the build number Nano Omaha should serve.
- @param from_build: String of the build number this device should say
- it is on by setting lsb_release.
- @param is_rollback: whether the build should serve with the rollback
- flag.
- @param is_critical: whether the build should serve marked as critical.
-
- """
- self._omaha = nano_omaha_devserver.NanoOmahaDevserver()
- self._omaha.set_image_params(image_url, image_size, sha256,
- build=to_build, is_rollback=is_rollback)
- self._omaha.start()
-
- self._au_util = update_engine_util.UpdateEngineUtil()
-
- update_url = self._omaha.get_update_url()
- self._au_util._create_custom_lsb_release(from_build, update_url)
-
- self._is_rollback = is_rollback
- self._is_critical = is_critical
-
-
- def update_and_poll_for_update_start(self, is_interactive=False):
- """
- Check for an update and wait until it starts.
-
- @param is_interactive: whether the request is interactive.
-
- @raises: error.TestFail when update does not start after timeout.
-
- """
- self._au_util._check_for_update(port=self._omaha.get_port(),
- interactive=is_interactive)
-
- def update_started():
- """Polling function: True or False if update has started."""
- status = self._au_util._get_update_engine_status()
- logging.info('Status: %s', status)
- return (status[self._au_util._CURRENT_OP]
- == self._au_util._UPDATE_ENGINE_DOWNLOADING)
-
- utils.poll_for_condition(
- update_started,
- timeout=60,
- exception=error.TestFail('Update did not start!'))
-
-
- def get_update_requests(self):
- """
- Get the contents of all the update requests from the most recent log.
-
- @returns: a sequential list of <request> xml blocks or None if none.
-
- """
- return self._au_util._get_update_requests()
-
-
- def get_time_of_last_update_request(self):
- """
- Get the time of the last update request from most recent logfile.
-
- @returns: seconds since epoch of when last update request happened
- (second accuracy), or None if no such timestamp exists.
-
- """
- return self._au_util._get_time_of_last_update_request()
-
-
- def get_latest_initial_request(self):
- """
- Return the most recent initial update request.
-
- AU requests occur in a chain of messages back and forth, e.g. the
- initial request for an update -> the reply with the update -> the
- report that install has started -> report that install has finished,
- etc. This function finds the first request in the latest such chain.
-
- This message has no eventtype listed, or is rebooted_after_update
- type (as an artifact from a previous update since this one).
- Subsequent messages in the chain have different eventtype values.
-
- @returns: string of the entire update request or None.
-
- """
- requests = self.get_update_requests()
- if not requests:
- return None
-
- MATCH_STR = r'eventtype="(.*?)"'
- for i in xrange(len(requests) - 1, -1, -1):
- search = re.search(MATCH_STR, requests[i])
- if (not search or
- search.group(1) == uee.EVENT_TYPE_REBOOTED_AFTER_UPDATE):
- return requests[i]
-
- return None
diff --git a/client/cros/multimedia/audio_facade_native.py b/client/cros/multimedia/audio_facade_native.py
index c82256a..487e145 100644
--- a/client/cros/multimedia/audio_facade_native.py
+++ b/client/cros/multimedia/audio_facade_native.py
@@ -390,15 +390,6 @@
cras_utils.set_selected_output_node_volume(volume)
- def set_input_gain(self, gain):
- """Sets the system capture gain.
-
- @param gain: the capture gain in db*100 (100 = 1dB)
-
- """
- cras_utils.set_capture_gain(gain)
-
-
def set_selected_node_types(self, output_node_types, input_node_types):
"""Set selected node types.
diff --git a/client/cros/multimedia/cfm_facade_native.py b/client/cros/multimedia/cfm_facade_native.py
index a828f5e..8dd17a7 100644
--- a/client/cros/multimedia/cfm_facade_native.py
+++ b/client/cros/multimedia/cfm_facade_native.py
@@ -19,12 +19,6 @@
from autotest_lib.client.common_lib.cros import kiosk_utils
from autotest_lib.client.cros.graphics import graphics_utils
-try:
- from telemetry.core.exceptions import BrowserConnectionGoneException
-except ImportError:
- # Telemetry isn't available in unit tests (which don't cover enrollment)
- pass
-
class TimeoutException(Exception):
"""Timeout Exception class."""
@@ -65,17 +59,6 @@
"auto_login": False,
"disable_gaia_services": False,
"extra_browser_args": extra_browser_args})
- # TODO(turg) If possible, remove switching enrollment altogether
- try:
- enrollment.SwitchToRemora(self._resource._browser)
- except BrowserConnectionGoneException:
- logging.error('Chrome restarted without DevTools when switching enrollment mode')
- logging.error('Attempting to restart')
-
- self._resource.start_custom_chrome({
- "auto_login": False,
- "disable_gaia_services": False,
- "extra_browser_args": extra_browser_args})
enrollment.RemoraEnrollment(self._resource._browser, self._USER_ID,
self._PWD)
diff --git a/client/cros/update_engine/nano_omaha_devserver.py b/client/cros/update_engine/nano_omaha_devserver.py
deleted file mode 100644
index a62f1e6..0000000
--- a/client/cros/update_engine/nano_omaha_devserver.py
+++ /dev/null
@@ -1,239 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import BaseHTTPServer
-import base64
-import binascii
-import thread
-import urlparse
-
-from string import Template
-from xml.dom import minidom
-
-def _split_url(url):
- """Splits a URL into the URL base and path."""
- split_url = urlparse.urlsplit(url)
- url_base = urlparse.urlunsplit(
- (split_url.scheme, split_url.netloc, '', '', ''))
- url_path = split_url.path
- return url_base, url_path.lstrip('/')
-
-
-class NanoOmahaDevserver(object):
- """A simple Omaha instance that can be setup on a DUT in client tests."""
-
- def __init__(self, eol_date=None, failures_per_url=1, backoff=False,
- num_urls=2):
- """
- Create a nano omaha devserver.
-
- @param eol_date: An integer representing days from Unix Epoch
- of the device's EOL date.
- @param failures_per_url: how many times each url can fail.
- @param backoff: Whether we should wait a while before trying to
- update again after a failure.
- @param num_urls: The number of URLs in the omaha response.
-
- """
- self._eol_date = eol_date
- self._failures_per_url = failures_per_url
- self._backoff = backoff
- self._num_urls = num_urls
-
-
- def create_update_response(self, appid):
- """
- Create an update response using the values from set_image_params().
-
- @param appid: the appid parsed from the request.
-
- @returns: a string of the response this server should send.
-
- """
- EOL_TEMPLATE = Template("""
- <response protocol="3.0">
- <daystart elapsed_seconds="44801"/>
- <app appid="$appid" status="ok">
- <ping status="ok"/>
- <updatecheck _eol_date="$eol_date" status="noupdate"/>
- </app>
- </response>
- """)
-
- RESPONSE_TEMPLATE = Template("""
- <response protocol="3.0">
- <daystart elapsed_seconds="44801"/>
- <app appid="$appid" status="ok">
- <ping status="ok"/>
- <updatecheck ${ROLLBACK_FLAGS}status="ok">
- <urls>
- $PER_URL_TAGS
- </urls>
- <manifest version="$build_number">
- <packages>
- <package hash_sha256="$sha256" name="$image_name"
- size="$image_size" required="true"/>
- </packages>
- <actions>
- <action event="postinstall"
- ChromeOSVersion="$build_number"
- sha256="$sha256"
- needsadmin="false"
- IsDeltaPayload="$is_delta"
- MaxFailureCountPerUrl="$failures_per_url"
- DisablePayloadBackoff="$disable_backoff"
- $OPTIONAL_ACTION_FLAGS
- />
- </actions>
- </manifest>
- </updatecheck>
- </app>
- </response>
- """)
- PER_URL_TEMPLATE = Template('<url codebase="$base/"/>')
- FLAG_TEMPLATE = Template('$key="$value"')
- ROLLBACK_TEMPLATE = Template("""
- _firmware_version="$fw"
- _firmware_version_0="$fw0"
- _firmware_version_1="$fw1"
- _firmware_version_2="$fw2"
- _firmware_version_3="$fw3"
- _firmware_version_4="$fw4"
- _kernel_version="$kern"
- _kernel_version_0="$kern0"
- _kernel_version_1="$kern1"
- _kernel_version_2="$kern2"
- _kernel_version_3="$kern3"
- _kernel_version_4="$kern4"
- _rollback="$is_rollback"
- """)
-
- # IF EOL date, return a simplified response with _eol_date tag.
- if self._eol_date is not None:
- return EOL_TEMPLATE.substitute(appid=appid, eol_date=self._eol_date)
-
- template_keys = {}
- template_keys['is_delta'] = str(self._is_delta).lower()
- template_keys['build_number'] = self._build
- template_keys['sha256'] = (
- binascii.hexlify(base64.b64decode(self._sha256)))
- template_keys['image_size'] = self._image_size
- template_keys['failures_per_url'] = self._failures_per_url
- template_keys['disable_backoff'] = str(not self._backoff).lower()
- template_keys['num_urls'] = self._num_urls
- template_keys['appid'] = appid
-
- (base, name) = _split_url(self._image_url)
- template_keys['base'] = base
- template_keys['image_name'] = name
-
- # For now, set all version flags to the same value.
- if self._is_rollback:
- fw_val = '5'
- k_val = '7'
- rollback_flags = ROLLBACK_TEMPLATE.substitute(
- fw=fw_val, fw0=fw_val, fw1=fw_val, fw2=fw_val, fw3=fw_val,
- fw4=fw_val, kern=k_val, kern0=k_val, kern1=k_val, kern2=k_val,
- kern3=k_val, kern4=k_val, is_rollback='true')
- else:
- rollback_flags = ''
- template_keys['ROLLBACK_FLAGS'] = rollback_flags
-
- per_url = ''
- for i in xrange(self._num_urls):
- per_url += PER_URL_TEMPLATE.substitute(template_keys)
- template_keys['PER_URL_TAGS'] = per_url
-
- action_flags = []
- def add_action_flag(key, value):
- """Helper function for the OPTIONAL_ACTION_FLAGS parameter."""
- action_flags.append(
- FLAG_TEMPLATE.substitute(key=key, value=value))
- if self._critical:
- add_action_flag('deadline', 'now')
- if self._metadata_size:
- add_action_flag('MetadataSize', self._metadata_size)
- if self._metadata_signature:
- add_action_flag('MetadataSignatureRsa', self._metadata_signature)
- if self._public_key:
- add_action_flag('PublicKeyRsa', self._public_key)
- template_keys['OPTIONAL_ACTION_FLAGS'] = (
- '\n '.join(action_flags))
-
- return RESPONSE_TEMPLATE.substitute(template_keys)
-
-
- class Handler(BaseHTTPServer.BaseHTTPRequestHandler):
- """Inner class for handling HTTP requests."""
- def do_POST(self):
- """Handler for POST requests."""
- if '/update' in self.path:
- # Parse the app id from the request to use in the response.
- content_len = int(self.headers.getheader('content-length'))
- request_string = self.rfile.read(content_len)
- request_dom = minidom.parseString(request_string)
- app = request_dom.firstChild.getElementsByTagName('app')[0]
- appid = app.getAttribute('appid')
-
- response = self.server._devserver.create_update_response(appid)
-
- self.send_response(200)
- self.send_header('Content-Type', 'application/xml')
- self.end_headers()
- self.wfile.write(response)
- else:
- self.send_response(500)
-
- def start(self):
- """Starts the server."""
- self._httpd = BaseHTTPServer.HTTPServer(('127.0.0.1', 0), self.Handler)
- self._httpd._devserver = self
- # Serve HTTP requests in a dedicated thread.
- thread.start_new_thread(self._httpd.serve_forever, ())
- self._port = self._httpd.socket.getsockname()[1]
-
- def stop(self):
- """Stops the server."""
- self._httpd.shutdown()
-
- def get_port(self):
- """Returns the TCP port number the server is listening on."""
- return self._port
-
- def get_update_url(self):
- """Returns the update url for this server."""
- return 'http://127.0.0.1:%d/update' % self._port
-
- def set_image_params(self, image_url, image_size, sha256,
- metadata_size=None, metadata_signature=None,
- public_key=None, is_delta=False, critical=True,
- is_rollback=False, build='999999.0.0'):
- """
- Sets the values to return in the Omaha response.
-
- Only the |image_url|, |image_size| and |sha256| parameters are
- mandatory.
-
- @param image_url: the url of the image to install.
- @param image_size: the size of the image to install.
- @param sha256: the sha256 hash of the image to install.
- @param metadata_size: the size of the metadata.
- @param metadata_signature: the signature of the metadata.
- @param public_key: the public key.
- @param is_delta: True if image is a delta, False if a full payload.
- @param critical: True for forced update, False for regular update.
- @param is_rollback: True if image is for rollback, False if not.
- @param build: the build number the response should claim to have.
-
- """
- self._image_url = image_url
- self._image_size = image_size
- self._sha256 = sha256
- self._metadata_size = metadata_size
- self._metadata_signature = metadata_signature
- self._public_key = public_key
- self._is_delta = is_delta
- self._critical = critical
- self._is_rollback = is_rollback
- self._build = build
diff --git a/client/cros/update_engine/update_engine_util.py b/client/cros/update_engine/update_engine_util.py
index 751e7e0..61a5e05 100644
--- a/client/cros/update_engine/update_engine_util.py
+++ b/client/cros/update_engine/update_engine_util.py
@@ -13,7 +13,7 @@
from autotest_lib.client.common_lib import error
from autotest_lib.client.common_lib import utils
-
+from autotest_lib.client.cros.update_engine import update_engine_event
_DEFAULT_RUN = utils.run
_DEFAULT_COPY = shutil.copy
@@ -352,7 +352,7 @@
files[1])).stdout
- def _create_custom_lsb_release(self, update_url, build='0.0.0.0'):
+ def _create_custom_lsb_release(self, update_url, build='0.0.0.0', **kwargs):
"""
Create a custom lsb-release file.
@@ -363,8 +363,15 @@
@param update_url: String of url to use for update check.
@param build: String of the build number to use. Represents the
Chrome OS build this device thinks it is on.
+ @param kwargs: A dictionary of key/values to be made into a query string
+ and appended to the update_url.
"""
+ # TODO(ahassani): This is quite fragile as the given URL can already
+ # have a search query. We need to unpack the URL and update the search
+ # query portion of it with kwargs.
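+ # e.g. kwargs={'foo': 'bar'} turns update_url into '<update_url>?foo=bar'.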
+ update_url = (update_url + '?' + '&'.join('%s=%s' % (k, v)
+ for k, v in kwargs.items()))
self._run('mkdir %s' % os.path.dirname(self._CUSTOM_LSB_RELEASE),
ignore_status=True)
self._run('touch %s' % self._CUSTOM_LSB_RELEASE)
@@ -459,3 +466,34 @@
return None
else:
return targets[-1].rpartition(err_str)[2]
+
+
+ def _get_latest_initial_request(self):
+ """
+ Return the most recent initial update request.
+
+ AU requests occur in a chain of messages back and forth, e.g. the
+ initial request for an update -> the reply with the update -> the
+ report that install has started -> report that install has finished,
+ etc. This function finds the first request in the latest such chain.
+
+ This message has no eventtype listed, or is rebooted_after_update
+ type (as an artifact from a previous update since this one).
+ Subsequent messages in the chain have different eventtype values.
+
+ @returns: string of the entire update request or None.
+
+ """
+ requests = self._get_update_requests()
+ if not requests:
+ return None
+
+ MATCH_STR = r'eventtype="(.*?)"'
+ for i in xrange(len(requests) - 1, -1, -1):
+ search = re.search(MATCH_STR, requests[i])
+ if (not search or
+ (search.group(1) ==
+ update_engine_event.EVENT_TYPE_REBOOTED_AFTER_UPDATE)):
+ return requests[i]
+
+ return None
diff --git a/client/deps/dejagnu/dejagnu-1.4.4.tar.bz2 b/client/deps/dejagnu/dejagnu-1.4.4.tar.bz2
deleted file mode 100644
index 4a7680f..0000000
--- a/client/deps/dejagnu/dejagnu-1.4.4.tar.bz2
+++ /dev/null
Binary files differ
diff --git a/client/deps/dejagnu/dejagnu.py b/client/deps/dejagnu/dejagnu.py
deleted file mode 100755
index af484c0..0000000
--- a/client/deps/dejagnu/dejagnu.py
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/usr/bin/python2
-
-import os
-from autotest_lib.client.bin import utils
-
-version = 1
-
-def setup(tarball, topdir):
- srcdir = os.path.join(topdir, 'src')
- utils.extract_tarball_to_dir(tarball, 'src')
- os.chdir(srcdir)
- utils.configure('--prefix=%s/dejagnu' % topdir)
- utils.make()
- utils.make('install')
-
- os.chdir(topdir)
-
-pwd = os.getcwd()
-# http://ftp.gnu.org/pub/gnu/dejagnu/dejagnu-1.4.4.tar.gz
-tarball = os.path.join(pwd, 'dejagnu-1.4.4.tar.bz2')
-utils.update_version(pwd+'/src', False, version, setup, tarball, pwd)
diff --git a/client/deps/systemtap/elfutils-0.140.tar.bz2 b/client/deps/systemtap/elfutils-0.140.tar.bz2
deleted file mode 100644
index 80416ed..0000000
--- a/client/deps/systemtap/elfutils-0.140.tar.bz2
+++ /dev/null
Binary files differ
diff --git a/client/deps/systemtap/systemtap-0.9.5.tar.gz b/client/deps/systemtap/systemtap-0.9.5.tar.gz
deleted file mode 100644
index 513b82d..0000000
--- a/client/deps/systemtap/systemtap-0.9.5.tar.gz
+++ /dev/null
Binary files differ
diff --git a/client/deps/systemtap/systemtap.py b/client/deps/systemtap/systemtap.py
deleted file mode 100755
index 5723757..0000000
--- a/client/deps/systemtap/systemtap.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/python2
-
-import os
-import shutil
-from autotest_lib.client.bin import utils
-
-version = 1
-
-def setup(topdir):
- srcdir = os.path.join(topdir, 'src')
-
- os.chdir(srcdir)
-
- utils.configure('--with-elfutils=elfutils ' \
- '--prefix=%s/systemtap' % topdir)
- utils.make('-j %d' % utils.count_cpus())
- utils.make('install')
-
- os.chdir(topdir)
-
-pwd = os.getcwd()
-utils.update_version(pwd+'/src', True, version, setup, pwd)
diff --git a/client/site_tests/autoupdate_CannedOmahaUpdate/autoupdate_CannedOmahaUpdate.py b/client/site_tests/autoupdate_CannedOmahaUpdate/autoupdate_CannedOmahaUpdate.py
index 51a6467..216a20d 100644
--- a/client/site_tests/autoupdate_CannedOmahaUpdate/autoupdate_CannedOmahaUpdate.py
+++ b/client/site_tests/autoupdate_CannedOmahaUpdate/autoupdate_CannedOmahaUpdate.py
@@ -35,7 +35,8 @@
"""
try:
- self._check_for_update(port=port, critical_update=True)
+ self._check_for_update(port=port, critical_update=True,
+ wait_for_completion=True)
except error.CmdError as e:
if not allow_failure:
raise error.TestFail('Update attempt failed: %s' %
diff --git a/client/site_tests/network_ChromeWifiConfigure/network_ChromeWifiConfigure.py b/client/site_tests/network_ChromeWifiConfigure/network_ChromeWifiConfigure.py
index 044dc47..2e43171 100644
--- a/client/site_tests/network_ChromeWifiConfigure/network_ChromeWifiConfigure.py
+++ b/client/site_tests/network_ChromeWifiConfigure/network_ChromeWifiConfigure.py
@@ -29,7 +29,7 @@
def _create_wifi(self, ssid, security):
logging.info('create_wifi')
- shared = 'true'
+ shared = 'false'
properties = {
'Type': 'WiFi',
'WiFi': {
@@ -61,6 +61,9 @@
def run_once(self):
+ """
+ Entry point of the test.
+ """
with cntc.ChromeNetworkingTestContext() as testing_context:
self._chrome_testing = testing_context
self._run_once_internal()
diff --git a/client/site_tests/platform_KernelVersionByBoard/expected b/client/site_tests/platform_KernelVersionByBoard/expected
index c40c5bf..1ea33d0 100644
--- a/client/site_tests/platform_KernelVersionByBoard/expected
+++ b/client/site_tests/platform_KernelVersionByBoard/expected
@@ -9,6 +9,7 @@
butterfly 3.8
candy 4.4
caroline 3.18
+caroline-kernelnext 4.19
cave 3.18
celes 3.18
chell 3.18
@@ -16,6 +17,7 @@
cid 3.14
clapper 4.4
cyan 3.18
+cyan-kernelnext 4.19
daisy 3.8
daisy_skate 3.8
daisy_spring 3.8
@@ -97,21 +99,19 @@
tricky 3.8
ultima 3.18
umaro 3.18
-veyron_fievel 3.14
-veyron_gus 3.14
-veyron_jaq 3.14
-veyron_jerry 3.14
-veyron_jerry-kernelnext 4.19
-veyron_mickey 3.14
-veyron_mighty 3.14
-veyron_minnie 3.14
-veyron_minnie-kernelnext 4.19
-veyron_nicky 3.14
-veyron_pinky 3.14
-veyron_rialto 3.14
-veyron_speedy 3.14
-veyron_thea 3.14
-veyron_tiger 3.14
+veyron_fievel 4.19
+veyron_gus 4.19
+veyron_jaq 4.19
+veyron_jerry 4.19
+veyron_mickey 4.19
+veyron_mighty 4.19
+veyron_minnie 4.19
+veyron_nicky 4.19
+veyron_pinky 4.19
+veyron_rialto 4.19
+veyron_speedy 4.19
+veyron_thea 4.19
+veyron_tiger 4.19
winky 4.4
wizpig 3.18
wolf 3.8
diff --git a/client/site_tests/policy_DeviceAutoUpdateDisabled/policy_DeviceAutoUpdateDisabled.py b/client/site_tests/policy_DeviceAutoUpdateDisabled/policy_DeviceAutoUpdateDisabled.py
index dfea1d7..1942b9c 100644
--- a/client/site_tests/policy_DeviceAutoUpdateDisabled/policy_DeviceAutoUpdateDisabled.py
+++ b/client/site_tests/policy_DeviceAutoUpdateDisabled/policy_DeviceAutoUpdateDisabled.py
@@ -6,19 +6,23 @@
import math
import time
+from autotest_lib.client.common_lib import autotemp
from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_au_context
+from autotest_lib.client.common_lib import utils
+from autotest_lib.client.cros.update_engine import nebraska_wrapper
+from autotest_lib.client.cros.update_engine import update_engine_test
from autotest_lib.client.cros.enterprise import enterprise_policy_base
class policy_DeviceAutoUpdateDisabled(
- enterprise_policy_base.EnterprisePolicyTest):
+ enterprise_policy_base.EnterprisePolicyTest,
+ update_engine_test.UpdateEngineTest):
"""Test for the DeviceAutoUpdateDisabled policy."""
version = 1
_POLICY = 'DeviceAutoUpdateDisabled'
- def _test_update_disabled(self, should_update):
+ def _test_update_disabled(self, port, should_update):
"""
Main test function.
@@ -34,7 +38,12 @@
logging.info('Update test start time: %s', start_time)
try:
- self._au_context.update_and_poll_for_update_start()
+ self._check_for_update(port=port, interactive=False)
+
+ utils.poll_for_condition(
+ self._is_update_started,
+ timeout=60,
+ exception=error.TestFail('Update did not start!'))
except error.TestFail as e:
if should_update:
raise e
@@ -42,7 +51,7 @@
if not should_update:
raise error.TestFail('Update started when it should not have!')
- update_time = self._au_context.get_time_of_last_update_request()
+ update_time = self._get_time_of_last_update_request()
logging.info('Last update time: %s', update_time)
if should_update and (not update_time or update_time < start_time):
@@ -51,20 +60,36 @@
raise error.TestFail('Update request was sent!')
- def run_once(self, case, image_url, image_size, sha256, enroll=True):
+ def run_once(self, case, image_url, enroll=True):
"""
Entry point of this test.
@param case: True, False, or None for the value of the update policy.
@param image_url: Url of update image (this build).
- @param image_size: Size of the update.
- @param sha256: Sha256 hash of the update.
"""
+ # Because of multiple inheritance, and because EnterprisePolicyTest comes
+ # earlier in the Python MRO, only its initialize() will get called
+ # automatically, but not UpdateEngineTest's. So we need to call it
+ # manually.
+ update_engine_test.UpdateEngineTest.initialize(self)
+
self.setup_case(device_policies={self._POLICY: case}, enroll=enroll)
- self._au_context = enterprise_au_context.NanoOmahaEnterpriseAUContext(
- image_url=image_url, image_size=image_size, sha256=sha256)
+ metadata_dir = autotemp.tempdir()
+ self._get_payload_properties_file(image_url, metadata_dir.name,
+ target_version='999999.9.9')
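+ # Strip the payload file name from image_url; the remaining base URL
+ # (ending in '/') is where the update payloads are hosted.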
+ base_url = ''.join(image_url.rpartition('/')[0:2])
+ with nebraska_wrapper.NebraskaWrapper(
+ log_dir=self.resultsdir,
+ update_metadata_dir=metadata_dir.name,
+ update_payloads_address=base_url) as nebraska:
- # When policy is False or not set, user should update.
- self._test_update_disabled(should_update=case is not True)
+ update_url = nebraska.get_update_url()
+ self._create_custom_lsb_release(update_url, build='1.1.1')
+
+ # When policy is False or not set, user should update.
+ self._test_update_disabled(port=nebraska.get_port(),
+ should_update=case is not True)
+
+ self.cleanup()
diff --git a/client/site_tests/policy_DeviceTargetVersionPrefix/policy_DeviceTargetVersionPrefix.py b/client/site_tests/policy_DeviceTargetVersionPrefix/policy_DeviceTargetVersionPrefix.py
index 167f1fa..e858a69 100644
--- a/client/site_tests/policy_DeviceTargetVersionPrefix/policy_DeviceTargetVersionPrefix.py
+++ b/client/site_tests/policy_DeviceTargetVersionPrefix/policy_DeviceTargetVersionPrefix.py
@@ -5,13 +5,17 @@
import logging
import re
+from autotest_lib.client.common_lib import autotemp
from autotest_lib.client.common_lib import error
-from autotest_lib.client.cros.enterprise import enterprise_au_context
+from autotest_lib.client.common_lib import utils
from autotest_lib.client.cros.enterprise import enterprise_policy_base
+from autotest_lib.client.cros.update_engine import nebraska_wrapper
+from autotest_lib.client.cros.update_engine import update_engine_test
class policy_DeviceTargetVersionPrefix(
- enterprise_policy_base.EnterprisePolicyTest):
+ enterprise_policy_base.EnterprisePolicyTest,
+ update_engine_test.UpdateEngineTest):
"""
Test for the DeviceTargetVersionPrefix policy.
@@ -32,14 +36,15 @@
'notset': None}
# The policy value -> what the device will send in the update request.
- _POLICY_TO_REQUEST = {'4444.': '4444',
- '4444.4.4': '4444.4.'}
+ _POLICY_TO_REQUEST = {'4444.': '4444', '4444.4.4': '4444.4.'}
- def _test_version_prefix(self, prefix_value):
+
+ def _test_version_prefix(self, prefix_value, port):
"""
Actual test. Fail if update request doesn't match expected.
@param prefix_value: the value of this policy.
+ @param port: The port to use when connecting to the Nebraska server.
@raises error.TestFail if test does not pass.
@@ -47,8 +52,14 @@
# E.g. <updatecheck targetversionprefix="10718.25.0.">
MATCH_STR = r'targetversionprefix="(.*?).?"'
- self._au_context.update_and_poll_for_update_start()
- latest_request = self._au_context.get_latest_initial_request()
+ self._check_for_update(port=port)
+
+ utils.poll_for_condition(
+ self._is_update_started,
+ timeout=60,
+ exception=error.TestFail('Update did not start!'))
+
+ latest_request = self._get_latest_initial_request()
if not latest_request:
raise error.TestFail('Could not find most recent update request!')
@@ -69,21 +80,34 @@
'match the value expected from policy!')
- def run_once(self, case, image_url, image_size, sha256):
+ def run_once(self, case, image_url):
"""
Entry point of this test.
@param case: Name of the testcase to run.
@param image_url: Url of update image (this build).
- @param image_size: Size of the update.
- @param sha256: Sha256 hash of the update.
"""
+ # Because of multiple inheritance, and because EnterprisePolicyTest comes
+ # earlier in the Python MRO, only its initialize() will get called
+ # automatically, but not UpdateEngineTest's. So we need to call it
+ # manually.
+ update_engine_test.UpdateEngineTest.initialize(self)
+
case_value = self._TEST_CASES[case]
self.setup_case(device_policies={self._POLICY_NAME: case_value},
enroll=True)
- self._au_context = enterprise_au_context.NanoOmahaEnterpriseAUContext(
- image_url=image_url, image_size=image_size, sha256=sha256)
+ metadata_dir = autotemp.tempdir()
+ self._get_payload_properties_file(image_url, metadata_dir.name,
+ target_version='999999.9.9')
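+ # Strip the payload file name from image_url; the remaining base URL
+ # (ending in '/') is where the update payloads are hosted.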
+ base_url = ''.join(image_url.rpartition('/')[0:2])
+ with nebraska_wrapper.NebraskaWrapper(
+ log_dir=self.resultsdir,
+ update_metadata_dir=metadata_dir.name,
+ update_payloads_address=base_url) as nebraska:
- self._test_version_prefix(case_value)
+ update_url = nebraska.get_update_url()
+ self._create_custom_lsb_release(update_url, build='1.1.1')
+
+ self._test_version_prefix(case_value, nebraska.get_port())
diff --git a/client/tests/systemtap/control b/client/tests/systemtap/control
deleted file mode 100644
index f7418fc..0000000
--- a/client/tests/systemtap/control
+++ /dev/null
@@ -1,14 +0,0 @@
-AUTHOR = """
-Anton Blanchard <anton@samba.org>
-"""
-NAME = "Systemtap test suite"
-TEST_TYPE = "client"
-TEST_CLASS = "General"
-TEST_CATEGORY = "Functional"
-TIME = "MEDIUM"
-DOC = """\
-This test runs the systemtap testsuite. You will need a kernel with debug
-symbols, or a matching debuginfo package from your distro.
-"""
-
-job.run_test(url='systemtap', local=False)
diff --git a/client/tests/systemtap/systemtap.py b/client/tests/systemtap/systemtap.py
deleted file mode 100644
index 08720c1..0000000
--- a/client/tests/systemtap/systemtap.py
+++ /dev/null
@@ -1,102 +0,0 @@
-import os, shutil, re
-from autotest_lib.client.bin import test, utils
-from autotest_lib.client.common_lib import error
-
-
-class systemtap(test.test):
- """
- This autotest module runs the systemtap test suite.
-
- @author Anton Blanchard <anton@au.ibm.com>
- """
-
- version = 1
- def initialize(self, local=False):
- self.job.require_gcc()
-
- self.systemtap_dir = ''
- if local == False:
- self.systemtap_dir = os.path.join(self.autodir,
- 'deps/systemtap/systemtap')
-
- def setup(self, local=False, tarball_systemtap='systemtap-0.9.5.tar.gz', tarball_elfutils='elfutils-0.140.tar.bz2'):
- depdir = os.path.join(self.autodir, 'deps/systemtap')
- tarball_systemtap = utils.unmap_url(depdir, tarball_systemtap, self.tmpdir)
- tarball_elfutils = utils.unmap_url(depdir, tarball_elfutils, self.tmpdir)
- srcdir = os.path.join(depdir, 'src')
- utils.extract_tarball_to_dir(tarball_systemtap, srcdir)
- elfdir = os.path.join(srcdir, 'elfutils')
- utils.extract_tarball_to_dir(tarball_elfutils, elfdir)
-
- self.job.setup_dep(['dejagnu'])
- if local == False:
- self.job.setup_dep(['systemtap'])
-
- # Try grabbing the systemtap tarball out of the deps directory
- depdir = os.path.join(self.autodir, 'deps/systemtap')
- if os.path.exists(os.path.join(depdir, tarball_systemtap)):
- tarball = utils.unmap_url(depdir, tarball_systemtap, self.tmpdir)
- else:
- tarball = utils.unmap_url(self.bindir, tarball_systemtap, self.tmpdir)
- utils.extract_tarball_to_dir(tarball_systemtap, self.srcdir)
-
- testsuite = os.path.join(self.srcdir, 'testsuite')
- os.chdir(testsuite)
-
- utils.configure()
- utils.make()
-
- # Run a simple systemtap script to make sure systemtap and the
- # kernel debuginfo packages are correctly installed
- script = "PATH=%s/bin:$PATH stap -c /bin/true -e 'probe syscall.read { exit() }'" % self.systemtap_dir
- try:
- utils.system(script)
- except:
- raise error.TestError('simple systemtap test failed, kernel debuginfo package may be missing: %s' % script)
-
-
- def run_once(self):
- testsuite = os.path.join(self.srcdir, 'testsuite')
- os.chdir(testsuite)
-
- dejagnu_dir = os.path.join(self.autodir, 'deps/dejagnu/dejagnu')
-
- utils.system('PATH=%s/bin:%s/bin:$PATH make installcheck' %
- (self.systemtap_dir, dejagnu_dir))
-
- # After we are done with this iteration, we move the log files to
- # the results dir
- sum = os.path.join(testsuite, 'systemtap.sum')
- log = os.path.join(testsuite, 'systemtap.log')
-
- if self.iteration:
- logfile = 'systemtap.log.%d' % self.iteration
- sumfile = 'systemtap.sum.%d' % self.iteration
- else:
- logfile = 'systemtap.log.profile'
- sumfile = 'systemtap.sum.profile'
-
- self.logfile = os.path.join(self.resultsdir, logfile)
- self.sumfile = os.path.join(self.resultsdir, sumfile)
-
- shutil.move(log, self.logfile)
- shutil.move(sum, self.sumfile)
-
-
- def postprocess_iteration(self):
- os.chdir(self.resultsdir)
-
- r = re.compile("# of (.*)\t(\d+)")
-
- f = open(self.sumfile, 'r')
- keyval = {}
- for line in f:
- result = r.match(line)
- if result:
- key = result.group(1)
- key = key.strip().replace(' ', '_')
- value = result.group(2)
- keyval[key] = value
- f.close()
-
- self.write_perf_keyval(keyval)
diff --git a/docs/faft-how-to-run-doc.md b/docs/faft-how-to-run-doc.md
index 0e2a370..17f57a6 100644
--- a/docs/faft-how-to-run-doc.md
+++ b/docs/faft-how-to-run-doc.md
@@ -18,9 +18,8 @@
FAFT (Fully Automated Firmware Tests) is a collection of tests and related
infrastructure that exercise and verify capabilities of Chrome OS.
The features tested by FAFT are implemented through low-level software
-(firmware/BIOS) and hardware.
-FAFT evolved from SAFT (Semi-Automated Firmware Tests) and you can locate tests
-in the [FAFT suite](https://chromium.googlesource.com/chromiumos/third_party/autotest/+/master/server/site_tests/)
+(firmware/BIOS) and hardware. FAFT evolved from SAFT
+(Semi-Automated Firmware Tests) and you can locate tests in the [FAFT suite]
in the Autotest tree as directories with the prefix `firmware_`.
The founding principles of FAFT are:
@@ -31,12 +30,7 @@
- Easy to integrate with existing test infrastructure (e.g. test lab, continuous testing, etc).
To access some of these low-level capabilities, the tests require a
-[servo](https://chromium.googlesource.com/chromiumos/third_party/hdctools/+/refs/heads/master/README.md#Power-Measurement)
-board
-([servo v2](https://chromium.googlesource.com/chromiumos/third_party/hdctools/+/refs/heads/master/docs/servo_v2.md),
-[servo v4](https://chromium.googlesource.com/chromiumos/third_party/hdctools/+/refs/heads/master/docs/servo_v4.md)
-with [servo micro](https://chromium.googlesource.com/chromiumos/third_party/hdctools/+/refs/heads/master/docs/servo_micro.md)
-or [servo v4 Type-C](https://chromium.googlesource.com/chromiumos/third_party/hdctools/+/refs/heads/master/docs/servo_v4.md#Type_C-Version)).
+[servo] board ([servo v2], [servo v4] with [servo micro] or [servo v4 Type-C]).
The servo board is connected directly to the DUT (Device Under Test) to enable
access to low-level hardware interfaces, as well as staging areas for backup
@@ -79,10 +73,10 @@
(but using a type A servo v4 with a converter to type C is fine.
A pure type C setup will put the security chip into debug mode).
-Figure 1.Diagram of hardware configuration for a ServoV4 Type-A with servo micro
-

+**Figure 1. Diagram of hardware configuration for a ServoV4 Type-A with servo micro.**
+
Details of servoV4 Type-A with micro connections:
1. Connect one end (micro USB) of the servo micro to servoV4 using a micro USB to USB cable.
@@ -108,10 +102,10 @@
controller, DUT, and network. It is important to ensure the DUT is powered off
before plugging in cables and components to the servo and DUT.
-Figure 2.Diagram of hardware configuration for a ServoV4 Type-C.
-

+**Figure 2. Diagram of hardware configuration for a ServoV4 Type-C.**
+
Details of servoV4 Type-C connections in Figure 2:
1. Connect the USB Type-C cable of the servoV4 to the DUT.
@@ -124,10 +118,10 @@
Make sure to use the following servo type and configuration
for running the FAFT PD suite
-Figure 3.Diagram of hardware configuration for a ServoV4 Type-C with servo micro
-

+**Figure 3. Diagram of hardware configuration for a ServoV4 Type-C with servo micro.**
+
Details about FAFT PD's ServoV4 Type-C + servo micro setup (Figure 3):
- The suite should only be run on devices released in 2019 and forward.
@@ -150,6 +144,8 @@

+**Figure 4. Diagram of hardware configuration for a ServoV2 board.**
+
Details of servo v2 connections:
1. Connect one end(ribbon cable) of the flex cable to servoV2 and the other end to the debug header on the chrome device.
@@ -173,10 +169,23 @@
### Setup Confirmation {#setup-confirmation}
To run FAFT you use the test_that tool, which does not automatically start a
-servod process for communicating with the servo board. Before running any tests:
+servod process for communicating with the servo board. Before running any tests, go into the chroot:
-1. Run `$ sudo servod --board=$BOARD --no-ns-pid` where `$BOARD` is the code name of the board you are testing. For example: `$ sudo servod --board=eve`
-2. Run the `firmware_FAFTSetup` test to verify basic functionality and ensure that your setup is correct. If test_that is in `/usr/bin`, the syntax is `$ /usr/bin/test_that --board=$BOARD $DUT_IP firmware_FAFTSetup`
+1. (chroot 1) Run `$ sudo servod --board=$BOARD --no-ns-pid` where `$BOARD` is the code name of the board you are testing. For example: `$ sudo servod --board=eve`
+1. Go into a second chroot.
+1. (chroot 2) Run the `firmware_FAFTSetup` test to verify basic functionality and ensure that your setup is correct.
+1. If test_that is in `/usr/bin`, the syntax is `$ /usr/bin/test_that --board=$BOARD $DUT_IP firmware_FAFTSetup`
+
+It is important to note that this syntax will work only if the correct packages
+for the DUT have been built. To build the packages, which usually takes
+a few hours, run the following from inside the chroot:
+
+`$ ./build_packages --board=$BOARD` where `$BOARD` is the code name of the board under test.
+
+If packages have not been built, the command won't work unless a path to the
+autotest directory is included in the command as follows:
+
+`$ test_that --autotest_dir ~/trunk/src/third_party/autotest/files/ --args="servo_host=localhost servo_port=9999" -b $BOARD $IP $TEST_NAME`
### Sample Commands {#sample-commands}
@@ -214,7 +223,7 @@
- `$ /usr/bin/test_that --board=$BOARD $DUT_IP --args "servo_host=$SERVO_HOST servo_port=$SERVO_PORT" suite:faft_lv1`
-To run multiple servo boards on the same servo host, use serial and port number.
+To run multiple servo boards on the same servo host (labstation), use serial and port number.
- `$ sudo servod --board=$BOARD --port $port_number --serial $servo_serial_number`
- `$ /usr/bin/test_that --board=$BOARD $DUT_IP --args "servo_host=localhost servo_port=$port_number faft_iterations=5000" f:.*firmware_ConsecutiveBoot/control`
@@ -237,4 +246,11 @@
Q: I got an error while running FAFT: `AutoservRunError: command execution error: sudo -n which flash_ec` . What's wrong?
-- A: Run `sudo emerge chromeos-ec` inside your chroot.
\ No newline at end of file
+- A: Run `sudo emerge chromeos-ec` inside your chroot.
+
+[FAFT suite]: https://chromium.googlesource.com/chromiumos/third_party/autotest/+/master/server/site_tests/
+[servo]: https://chromium.googlesource.com/chromiumos/third_party/hdctools/+/refs/heads/master/README.md#Power-Measurement
+[servo v2]: https://chromium.googlesource.com/chromiumos/third_party/hdctools/+/refs/heads/master/docs/servo_v2.md
+[servo v4]: https://chromium.googlesource.com/chromiumos/third_party/hdctools/+/refs/heads/master/docs/servo_v4.md
+[servo micro]: https://chromium.googlesource.com/chromiumos/third_party/hdctools/+/refs/heads/master/docs/servo_micro.md
+[servo v4 Type-C]: https://chromium.googlesource.com/chromiumos/third_party/hdctools/+/refs/heads/master/docs/servo_v4.md#Type_C-Version
diff --git a/frontend/afe/moblab_rpc_interface.py b/frontend/afe/moblab_rpc_interface.py
index ed0624c..e48f297 100644
--- a/frontend/afe/moblab_rpc_interface.py
+++ b/frontend/afe/moblab_rpc_interface.py
@@ -28,7 +28,6 @@
from autotest_lib.frontend.afe import rpc_utils
from autotest_lib.server import frontend
from autotest_lib.server.hosts import moblab_host
-from chromite.lib import gs
_CONFIG = global_config.global_config
MOBLAB_BOTO_LOCATION = '/home/moblab/.boto'
@@ -75,8 +74,7 @@
@classmethod
def get_gsutil_cmd(cls):
if not cls._GSUTIL_CMD:
- cls._GSUTIL_CMD = gs.GSContext.GetDefaultGSUtilBin(
- cache_dir=CROS_CACHEDIR)
+ cls._GSUTIL_CMD = 'gsutil'
return cls._GSUTIL_CMD
diff --git a/server/control_segments/repair b/server/control_segments/repair
index 8aace51..35cd9b9 100644
--- a/server/control_segments/repair
+++ b/server/control_segments/repair
@@ -43,7 +43,9 @@
logging.debug('Repair with labels list %s', labels_list)
try:
- target.labels.update_labels(target, keep_pool=True)
+ target.labels.update_labels(target,
+ task_name='repair',
+ keep_pool=True)
except Exception:
logging.exception('Exception while updating labels.')
except Exception:
diff --git a/server/cros/bluetooth/bluetooth_adapter_tests.py b/server/cros/bluetooth/bluetooth_adapter_tests.py
index f44b7b8..80701b5 100644
--- a/server/cros/bluetooth/bluetooth_adapter_tests.py
+++ b/server/cros/bluetooth/bluetooth_adapter_tests.py
@@ -1043,6 +1043,14 @@
'is_discovering': is_discovering}
return all(self.results.values())
+ @_test_retry_and_log(False)
+ def test_is_discovering(self):
+ """Test that the adapter is already discovering."""
+ is_discovering = self._wait_for_condition(
+ self.bluetooth_facade.is_discovering, method_name())
+
+ self.results = {'is_discovering': is_discovering}
+ return all(self.results.values())
@_test_retry_and_log
def test_stop_discovery(self):
@@ -1070,6 +1078,16 @@
'is_discoverable': is_discoverable}
return all(self.results.values())
+ @_test_retry_and_log(False)
+ def test_is_discoverable(self):
+ """Test that the adapter is discoverable."""
+ is_discoverable = self._wait_for_condition(
+ self.bluetooth_facade.is_discoverable, method_name())
+
+ self.results = {'is_discoverable': is_discoverable}
+ return all(self.results.values())
+
+
def _test_timeout_property(self, set_property, check_property, set_timeout,
get_timeout, property_name,
timeout_values = [0, 60, 180]):
@@ -1374,6 +1392,44 @@
'device_discovered': device_discovered}
return has_device_initially or device_discovered
+ def _test_discover_by_device(self, device):
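+        """Check whether the peer device can discover the adapter address.
+
+        @param device: Meta device to represent peer device.
+
+        @returns: True if the adapter address was discovered by the device.
+        """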
+ device_discovered = device.Discover(self.bluetooth_facade.address)
+
+ self.results = {
+ 'device_discovered': device_discovered
+ }
+
+ return all(self.results.values())
+
+ @_test_retry_and_log(False)
+ def test_discover_by_device(self, device):
+ """Test that the device could discover the adapter address.
+
+ @param device: Meta device to represent peer device.
+
+ @returns: True if the adapter is found by the device.
+ """
+ return self._test_discover_by_device(device)
+
+ @_test_retry_and_log(False)
+ def test_discover_by_device_fails(self, device):
+ """Test that the device could not discover the adapter address.
+
+ @param device: Meta device to represent peer device.
+
+ @returns False if the adapter is found by the device.
+ """
+ return not self._test_discover_by_device(device)
+
+ @_test_retry_and_log(False)
+ def test_device_set_discoverable(self, device, discoverable):
+ """Test that we could set the peer device to discoverable. """
+ try:
+ device.SetDiscoverable(discoverable)
+ except:
+ return False
+
+ return True
@_test_retry_and_log
def test_pairing(self, device_address, pin, trusted=True):
@@ -1604,6 +1660,26 @@
'connection_seen_by_adapter': connection_seen_by_adapter}
return all(self.results.values())
+ @_test_retry_and_log
+ def test_connection_by_device_only(self, device, adapter_address):
+ """Test that the device could connect to adapter successfully.
+
+ This is a modified version of test_connection_by_device that only
+ communicates with the peer device and not the host (in case the host is
+ suspended for example).
+
+ @param device: the bluetooth peer device
+ @param adapter_address: address of the adapter
+
+ @returns: True if the connection was established by the device or False.
+ """
+ connected = device.ConnectToRemoteAddress(adapter_address)
+ self.results = {
+ 'connection_by_device': connected
+ }
+
+ return all(self.results.values())
+
@_test_retry_and_log
def test_disconnection_by_device(self, device):
diff --git a/server/cros/bluetooth/bluetooth_dbus_api_tests.py b/server/cros/bluetooth/bluetooth_dbus_api_tests.py
index b9c8a4b..bebb035 100644
--- a/server/cros/bluetooth/bluetooth_dbus_api_tests.py
+++ b/server/cros/bluetooth/bluetooth_dbus_api_tests.py
@@ -1,4 +1,4 @@
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -13,12 +13,19 @@
method_name = bluetooth_adapter_tests.method_name
_test_retry_and_log = bluetooth_adapter_tests._test_retry_and_log
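+# Delay and timeout values, in seconds, used when polling the HCI state.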
+DEFAULT_START_DELAY_SECS = 2
+DEFAULT_HOLD_INTERVAL = 10
+DEFAULT_HOLD_TIMEOUT = 60
+
# String representation of DBus exceptions
-DBUS_ERRORS ={
+DBUS_ERRORS = {
'InProgress' : 'org.bluez.Error.InProgress: Operation already in progress',
'NotReady' : 'org.bluez.Error.NotReady: Resource Not Ready',
'Failed': {
- 'discovery' : 'org.bluez.Error.Failed: No discovery started'}}
+ 'discovery_start' : 'org.bluez.Error.Failed: No discovery started',
+ 'discovery_unpause' : 'org.bluez.Error.Failed: Discovery not paused'
+ }
+ }
class BluetoothDBusAPITests(bluetooth_adapter_tests.BluetoothAdapterTests):
@@ -56,6 +63,46 @@
else:
return True
+ def _compare_error(self, expected, actual):
+ """ Helper function to compare error and log. """
+ if expected == actual:
+ return True
+ else:
+            logging.debug("Expected error is %s. Actual error is %s",
+                          expected, actual)
+ return False
+
+ def _get_hci_state(self, msg=''):
+ """ get state of bluetooth controller. """
+ hci_state = self.log_flags(msg, self.get_dev_info()[3])
+ logging.debug("hci_state is %s", hci_state)
+ return hci_state
+
+ def _wait_till_hci_state_inquiry(self):
+ """ Wait till adapter is in INQUIRY state.
+
+ @return: True if adapter does INQUIRY before timeout, False otherwise
+ """
+ return self._wait_for_condition(
+ lambda: 'INQUIRY' in self._get_hci_state('Expecting INQUIRY'),
+ method_name(),
+ start_delay = DEFAULT_START_DELAY_SECS)
+
+ def _wait_till_hci_state_no_inquiry_holds(self):
+ """ Wait till adapter does not enter INQUIRY for a period of time
+
+ @return : True if adapter is not in INQUIRY for a period of time before
+ timeout. Otherwise False.
+ """
+ return self._wait_till_condition_holds(
+ lambda: 'INQUIRY' not in self._get_hci_state('Expecting NOINQUIRY'),
+ method_name(),
+ hold_interval = DEFAULT_HOLD_INTERVAL,
+ timeout = DEFAULT_HOLD_TIMEOUT,
+ start_delay = DEFAULT_START_DELAY_SECS)
+
+
+
def _wait_till_discovery_stops(self, stop_discovery=True):
"""stop discovery if specified and wait for discovery to stop
@@ -141,11 +188,14 @@
start_discovery, error = self.bluetooth_facade.start_discovery()
is_discovering = self._wait_till_discovery_starts(start_discovery=False)
+ inquiry_state = self._wait_till_hci_state_inquiry()
+
self.results = {'reset' : reset,
'is_power_on' : is_power_on,
'is_not_discovering': is_not_discovering,
'start_discovery' : start_discovery,
- 'is_discovering': is_discovering
+ 'is_discovering': is_discovering,
+ 'inquiry_state' : inquiry_state
}
return all(self.results.values())
@@ -161,10 +211,13 @@
start_discovery, error = self.bluetooth_facade.start_discovery()
+
self.results = {'reset' : reset,
'is_discovering' : is_discovering,
'start_discovery_failed' : not start_discovery,
- 'error_matches' : error == DBUS_ERRORS['InProgress']}
+ 'error_matches' : self._compare_error(error,
+ DBUS_ERRORS['InProgress'])
+ }
return all(self.results.values())
@_test_retry_and_log(False)
@@ -183,7 +236,8 @@
self.results = {'reset' : reset,
'power_off' : is_power_off,
'start_discovery_failed' : not start_discovery,
- 'error_matches' : error == DBUS_ERRORS['NotReady'],
+ 'error_matches' : self._compare_error(error,
+ DBUS_ERRORS['NotReady']),
'power_on' : is_power_on}
return all(self.results.values())
@@ -225,6 +279,7 @@
stop_discovery, error = self.bluetooth_facade.stop_discovery()
is_not_discovering = self._wait_till_discovery_stops(
stop_discovery=False)
+ self._wait_till_hci_state_no_inquiry_holds()
self.results = {'reset' : reset,
'is_power_on' : is_power_on,
'is_discovering': is_discovering,
@@ -251,7 +306,8 @@
'reset' : reset,
'is_not_discovering' : is_not_discovering,
'stop_discovery_failed' : not stop_discovery,
- 'error_matches' : error == DBUS_ERRORS['Failed']['discovery'],
+ 'error_matches' : self._compare_error(error,
+ DBUS_ERRORS['Failed']['discovery_start']),
'still_not_discovering': still_not_discovering}
return all(self.results.values())
@@ -271,7 +327,8 @@
self.results = {'reset' : reset,
'is_power_off' : is_power_off,
'stop_discovery_failed' : not stop_discovery,
- 'error_matches' : error == DBUS_ERRORS['NotReady'],
+ 'error_matches' : self._compare_error(error,
+ DBUS_ERRORS['NotReady']),
'is_power_on' : is_power_on}
return all(self.results.values())
@@ -315,13 +372,15 @@
""" Test success case of pause_discovery call. """
reset = self._reset_state()
is_discovering = self._wait_till_discovery_starts()
+ self._wait_till_hci_state_inquiry()
+
pause_discovery, error = self.bluetooth_facade.pause_discovery(False)
- #TODO: Confirm discovery is paused by check the state of the adapter
-
+ no_inquiry = self._wait_till_hci_state_no_inquiry_holds()
self.results = {'reset' : reset,
'is_discovering': is_discovering,
'pause_discovery' : pause_discovery,
+ 'no_inquiry' : no_inquiry
}
return all(self.results.values())
@@ -334,12 +393,12 @@
pause_discovery, error = self.bluetooth_facade.pause_discovery(False)
- #TODO: Confirm discovery is paused by check the state of the adapter
-
+ no_inquiry = self._wait_till_hci_state_no_inquiry_holds()
self.results = {'reset' : reset,
'is_power_on' : is_power_on,
'is_not_discovering': is_not_discovering,
'pause_discovery' : pause_discovery,
+ 'no_inquiry' : no_inquiry
}
return all(self.results.values())
@@ -362,7 +421,8 @@
self.results = {'reset' : reset,
'is_power_off' : is_power_off,
'pause_discovery_failed' : not pause_discovery,
- 'error_matches' : error == DBUS_ERRORS['NotReady'],
+ 'error_matches' : self._compare_error(error,
+ DBUS_ERRORS['NotReady']),
'is_power_on' : is_power_on,
'discovery_started' : discovery_started
}
@@ -381,17 +441,146 @@
pause_discovery, _ = self.bluetooth_facade.pause_discovery()
pause_discovery_again, error = self.bluetooth_facade.pause_discovery()
- #TODO: Confirm discovery is paused by check the state of the adapter
+
+ no_inquiry = self._wait_till_hci_state_no_inquiry_holds()
self.results = {'reset' : reset,
'is_power_on' : is_power_on,
'is_discovering': is_discovering,
'pause_discovery' : pause_discovery,
'pause_discovery_failed' : not pause_discovery_again,
- 'error_matches' : error == DBUS_ERRORS['InProgress'],
+ 'error_matches' : self._compare_error(error,
+ DBUS_ERRORS['InProgress']),
+ 'no_inquiry' : no_inquiry,
}
return all(self.results.values())
+########################################################################
+# dbus call: unpause_discovery
+# arguments: boolean system_suspend_resume
+# returns : True/False
+# Notes: 1: argument system_suspend_resume is ignored in the code
+# 2: pause/unpause state is not reflected in Discovering state
+#####################################################
+# Positive cases
+# Case 1
+# preconditions: Adapter powered on AND
+#                Discovery started and Discovery currently paused
+# Argument: [True|False]
+# result: Success
+######################################################
+# Negative cases
+#
+# Case 1
+# preconditions: Adapter powered on AND
+# Discovery currently not paused
+# Argument: [True|False]
+# result: Failed
+#
+# Case 2
+# preconditions: Adapter powered off
+# result: Failure
+# error : NotReady
+#
+# Case 3
+# precondition: Adapter powered on AND
+# Discovery paused
+# result: Failure
+# error: Busy
+#########################################################################
+ @_test_retry_and_log(False)
+ def test_dbus_unpause_discovery_success(self):
+ """ Test success case of unpause_discovery call. """
+ reset = self._reset_state()
+ is_discovering = self._wait_till_discovery_starts()
+ pause_discovery, _ = self.bluetooth_facade.pause_discovery()
+ no_inquiry_after_pause = self._wait_till_hci_state_no_inquiry_holds()
+
+ unpause_discovery, error = self.bluetooth_facade.unpause_discovery()
+
+ inquiry_after_unpause = self._wait_till_hci_state_inquiry()
+ self.results = {'reset' : reset,
+ 'is_discovering': is_discovering,
+ 'pause_discovery' : pause_discovery,
+ 'no_inquiry_after_pause' : no_inquiry_after_pause,
+ 'unpause_discovery' : unpause_discovery,
+ 'error' : error is None,
+ 'inquiry_after_unpause' : inquiry_after_unpause
+ }
+ return all(self.results.values())
+
+ @_test_retry_and_log(False)
+ def test_dbus_unpause_discovery_fail_without_pause(self):
+ """ Test failure case of unpause_discovery call.
+
+        Call unpause_discovery without calling pause_discovery and check that
+        it fails with 'org.bluez.Error.Failed: Discovery not paused'.
+ """
+ reset = self._reset_state()
+ is_discovering = self._wait_till_discovery_starts()
+
+ unpause_discovery, error = self.bluetooth_facade.unpause_discovery()
+
+ inquiry_after_unpause = self._wait_till_hci_state_inquiry()
+ self.results = {'reset' : reset,
+ 'is_discovering': is_discovering,
+ 'unpause_discovery_fails' : not unpause_discovery,
+ 'error' : self._compare_error(error,
+ DBUS_ERRORS['Failed']['discovery_unpause']),
+ 'inquiry_after_unpause' : inquiry_after_unpause
+ }
+ return all(self.results.values())
+
+ @_test_retry_and_log(False)
+ def test_dbus_unpause_discovery_fail_power_off(self):
+ """ Test Failure case of unpause_discovery call.
+
+        Unpause discovery when the adapter is turned off and confirm that it
+        fails with 'org.bluez.Error.Failed: Discovery not paused'.
+
+ """
+ reset = self._reset_state()
+ is_power_off = self._wait_till_power_off()
+
+ unpause_discovery, error = self.bluetooth_facade.unpause_discovery()
+
+ self.results = {'reset' : reset,
+ 'is_power_off' : is_power_off,
+ 'unpause_discovery_failed' : not unpause_discovery,
+ 'error_matches' : self._compare_error(error,
+ DBUS_ERRORS['Failed']['discovery_unpause']),
+
+ }
+ return all(self.results.values())
+
+
+ @_test_retry_and_log(False)
+ def test_dbus_unpause_discovery_fail_already_unpaused(self):
+ """ Test Failure case of unpause_discovery call.
+
+        Call unpause discovery twice and make sure the second call fails
+        with 'org.bluez.Error.Failed: Discovery not paused'.
+ """
+ reset = self._reset_state()
+ is_discovering = self._wait_till_discovery_starts()
+ pause_discovery, error = self.bluetooth_facade.pause_discovery()
+ unpause_discovery, _ = self.bluetooth_facade.unpause_discovery()
+
+ unpause_again, error = self.bluetooth_facade.unpause_discovery()
+
+ inquiry_after_unpause = self._wait_till_hci_state_inquiry()
+
+ self.results = {
+ 'reset' : reset,
+ 'is_discovering': is_discovering,
+ 'pause_discovery' : pause_discovery,
+ 'unpause_discovery' : unpause_discovery,
+ 'unpause_again_failed': not unpause_again,
+ 'error_matches' : self._compare_error(error,
+ DBUS_ERRORS['Failed']['discovery_unpause']),
+ 'inquiry_after_unpause':inquiry_after_unpause
+ }
+ return all(self.results.values())
########################################################################
# dbus call: get_suppported_capabilities
diff --git a/server/cros/faft/cr50_test.py b/server/cros/faft/cr50_test.py
index aec0851..64f29ce 100644
--- a/server/cros/faft/cr50_test.py
+++ b/server/cros/faft/cr50_test.py
@@ -31,8 +31,7 @@
CR50_DEBUG_FILE = '*/cr50.dbg.%s.bin.*%s'
CR50_ERASEFLASHINFO_FILE = (
'*/cr50_Unknown_NodeLocked-%s_cr50-accessory-mp.bin')
- CR50_TOT_VER_FILE = 'tot/LATEST'
- CR50_TOT_FILE = 'tot/cr50.bin.%s.%s'
+ CR50_QUAL_VERSION_FILE = 'chromeos-cr50-QUAL_VERSION'
NONE = 0
# Saved the original device state during init.
INITIAL_IMAGE_STATE = 1 << 0
@@ -95,22 +94,8 @@
elif self.original_ccd_level != 'lock':
raise error.TestNAError('Lock the console before running cr50 test')
- self._save_original_state()
+ self._save_original_state(full_args.get('release_path', ''))
- # Verify cr50 is still running the correct version
- cr50_qual_version = full_args.get('cr50_qual_version', '').strip()
- if cr50_qual_version:
- _, running_rw, running_bid = self.get_saved_cr50_original_version()
- expected_rw, expected_bid_sym = cr50_qual_version.split('/')
- expected_bid = cr50_utils.GetBoardIdInfoString(expected_bid_sym,
- symbolic=False)
- logging.debug('Running %s %s Expect %s %s', running_rw, running_bid,
- expected_rw, expected_bid)
- if running_rw != expected_rw or expected_bid != running_bid:
- raise error.TestError('Not running %s' % cr50_qual_version)
-
- # We successfully saved the device state
- self._saved_state |= self.INITIAL_IMAGE_STATE
# Try and download all images necessary to restore cr50 state.
try:
self._save_dbg_image(full_args.get('cr50_dbg_image_path', ''))
@@ -119,12 +104,7 @@
logging.warning('Error saving DBG image: %s', str(e))
if restore_cr50_image:
raise error.TestNAError('Need DBG image: %s' % str(e))
- try:
- self._save_original_images(full_args.get('release_path', ''))
- self._saved_state |= self.DEVICE_IMAGES
- except Exception as e:
- logging.warning('Error saving ChromeOS image cr50 firmware: %s',
- str(e))
+
try:
self._save_eraseflashinfo_image(
full_args.get('cr50_eraseflashinfo_image_path', ''))
@@ -135,6 +115,106 @@
raise error.TestNAError('Need eraseflashinfo image: %s' %
str(e))
+ # TODO(b/143888583): remove qual update during init once new design to
+ # to provision cr50 updates is in place.
+ is_qual = full_args.get('is_qual', '').lower() == 'true'
+ if is_qual or self.running_qual_suite():
+ release_ver_arg = full_args.get('release_ver', '')
+ release_path_arg = full_args.get('release_path', '')
+ self.ensure_qual_image_is_running(release_ver_arg, release_path_arg)
+
+
+ def running_qual_suite(self):
+ """Return True if the qual image needs to be running."""
+ for pool in self.host.host_info_store.get().pools:
+ # TODO(b/149109740): remove once the pool values are verified.
+ # Change to run with faft-cr50 and faft-cr50-experimental suites.
+ logging.info('Checking pool: %s', pool)
+ if 'faft-cr50-experimental' in pool:
+ return True
+ return False
+
+
+ def ensure_qual_image_is_running(self, qual_ver_str, qual_path):
+ """Update to the qualification image if it's not running.
+
+ qual_ver_str and path are command line args that may be supplied to
+ specify a local version or path. If neither are supplied, the version
+ from gs will be used to determine what version cr50 should run.
+
+ qual_ver_str and qual_path should not be supplied together. If they are,
+ the path will be used. It's not a big deal as long as they agree with
+ each other.
+
+ @param qual_ver_str: qualification version string or None.
+ @param qual_path: local path to the qualification image or None.
+ """
+ # Get the local image information.
+ if qual_path:
+ dest, qual_ver = cr50_utils.InstallImage(self.host, qual_path,
+ '/tmp/qual_cr50.bin')
+ self.host.run('rm ' + dest)
+ qual_bid_str = (cr50_utils.GetBoardIdInfoString(qual_ver[2], False)
+ if qual_ver[2] else '')
+ qual_ver_str = '%s/%s' % (qual_ver[1], qual_bid_str)
+
+        # If no version was supplied, read the qualification version from gs.
+ if not qual_ver_str:
+ gsurl = os.path.join(self.GS_PRIVATE, self.CR50_QUAL_VERSION_FILE)
+ dut_path = self.download_cr50_gs_file(gsurl, False)[1]
+ qual_ver_str = self.host.run('cat ' + dut_path).stdout.strip()
+
+ # Download the qualification image based on the version.
+ if not qual_path:
+ rw, bid = qual_ver_str.split('/')
+ qual_path, qual_ver = self.download_cr50_release_image(rw, bid)
+
+ logging.info('Cr50 Qual Version: %s', qual_ver_str)
+ logging.info('Cr50 Qual Path: %s', qual_path)
+ qual_chip_bid = cr50_utils.GetChipBIDFromImageBID(
+ qual_ver[2], self.get_device_brand())
+ logging.info('Cr50 Qual Chip BID: %s', qual_chip_bid)
+
+ # Replace only the prod or prepvt image based on the major version.
+ if int(qual_ver[1].split('.')[1]) % 2:
+ prod_ver = self._original_image_state['prod_version']
+ prepvt_ver = qual_ver
+ prod_path = self._device_prod_image
+ prepvt_path = qual_path
+ else:
+ prod_ver = qual_ver
+ prepvt_ver = self._original_image_state['prepvt_version']
+ prod_path = qual_path
+ prepvt_path = self._device_prepvt_image
+
+ # Generate a dictionary with all of the expected state.
+ qual_state = {}
+ qual_state['prod_version'] = prod_ver
+ qual_state['prepvt_version'] = prepvt_ver
+ qual_state['chip_bid'] = qual_chip_bid
+ qual_state['running_image_bid'] = qual_ver[2]
+ # The test can't rollback RO. The newest RO should be running at the end
+ # of the test. max_ro will be none if the versions are the same. Use the
+ # running_ro in that case.
+ running_ro = self.get_saved_cr50_original_version()[0]
+ max_ro = cr50_utils.GetNewestVersion(running_ro, qual_ver[0])
+ qual_state['running_image_ver'] = (max_ro or running_ro, qual_ver[1],
+ None)
+ mismatch = self._check_running_image_and_board_id(qual_state)
+ if not mismatch:
+ logging.info('Running qual image. No update needed.')
+ return
+ logging.info('Cr50 qual update required.')
+ # TODO(b/149109740): remove once running_qual_suite logic has been
+ # verified.
+ logging.info('Skipping until logic has been verified')
+ return
+ filesystem_util.make_rootfs_writable(self.host)
+ self._update_device_images_and_running_cr50_firmware(qual_state,
+ qual_path, prod_path, prepvt_path)
+ logging.info("Recording qual device state as 'original' device state")
+ self._save_original_state(qual_path)
+
def _saved_cr50_state(self, state):
"""Returns True if the test has saved the given state
@@ -237,14 +317,27 @@
running_rw, running_bid)[0]
- def _save_original_state(self):
+ def _save_original_state(self, release_path):
"""Save the cr50 related state.
- Save the device's current cr50 version, cr50 board id, and image
- at /opt/google/cr50/firmware/cr50.bin.prod. These will be used to
- restore the state during cleanup.
+ Save the device's current cr50 version, cr50 board id, the running cr50
+ image, the prepvt, and prod cr50 images. These will be used to restore
+ the cr50 state during cleanup.
+
+ @param release_path: the optional command line arg of path for the local
+ cr50 image.
"""
+ self._saved_state &= ~self.INITIAL_IMAGE_STATE
self._original_image_state = self.get_image_and_bid_state()
+ # We successfully saved the device state
+ self._saved_state |= self.INITIAL_IMAGE_STATE
+ self._saved_state &= ~self.DEVICE_IMAGES
+ try:
+ self._save_original_images(release_path)
+ self._saved_state |= self.DEVICE_IMAGES
+ except Exception as e:
+ logging.warning('Error saving ChromeOS image cr50 firmware: %s',
+ str(e))
def get_saved_cr50_original_version(self):
@@ -386,20 +479,6 @@
return not not restore
- def _restore_device_files(self):
- """Copy the original .prod and .prepvt images onto the dut."""
- if not filesystem_util.is_rootfs_writable(self.host):
- return
- # Copy the original .prod file onto the DUT.
- if self._device_prod_image:
- cr50_utils.InstallImage(self.host, self._device_prod_image,
- cr50_utils.CR50_PROD)
- # Copy the original .prepvt file onto the DUT.
- if self._device_prepvt_image:
- cr50_utils.InstallImage(self.host, self._device_prepvt_image,
- cr50_utils.CR50_PREPVT)
-
-
def _get_image_information(self, ext):
"""Get the image information for the .prod or .prepvt image.
@@ -562,35 +641,58 @@
logging.info('DUT did not respond. Resetting it.')
- def _restore_device_images_and_running_cr50_image(self):
- """Restore the images on the device and the running cr50 image."""
- state_mismatch = self._check_original_image_state()
- if not state_mismatch:
- logging.info('Cr50 and device are using the original images.')
- return
- if self._provision_update:
+ def _update_device_images_and_running_cr50_firmware(
+ self, state, release_path, prod_path, prepvt_path):
+ """Update cr50, set the board id, and copy firmware to the DUT.
+
+ @param state: A dictionary with the expected running version, board id,
+ device cr50 firmware versions.
+ @param release_path: The image to update cr50 to
+ @param prod_path: The path to the .prod image
+ @param prepvt_path: The path to the .prepvt image
+ @raises TestError: if setting any state failed
+ """
+ mismatch = self._check_running_image_and_board_id(state)
+ if not mismatch:
+ logging.info('Nothing to do.')
return
# Use the DBG image to restore the original image.
- if self._cleanup_required(state_mismatch, self.DBG_IMAGE):
- self.update_cr50_image_and_board_id(
- self.get_saved_cr50_original_path(),
- self._original_image_state['chip_bid'])
+ if self._cleanup_required(mismatch, self.DBG_IMAGE):
+ self.update_cr50_image_and_board_id(release_path, state['chip_bid'])
- new_mismatch = self._check_original_image_state()
+ new_mismatch = self._check_running_image_and_board_id(state)
# Copy the original .prod and .prepvt images back onto the DUT.
- if self._cleanup_required(new_mismatch, self.DEVICE_IMAGES):
- self._restore_device_files()
+ if (self._cleanup_required(new_mismatch, self.DEVICE_IMAGES) and
+ filesystem_util.is_rootfs_writable(self.host)):
+ # Copy the .prod file onto the DUT.
+ if prod_path and 'prod_version' in new_mismatch:
+ cr50_utils.InstallImage(self.host, prod_path,
+ cr50_utils.CR50_PROD)
+ # Copy the .prepvt file onto the DUT.
+ if prepvt_path and 'prepvt_version' in new_mismatch:
+ cr50_utils.InstallImage(self.host, prepvt_path,
+ cr50_utils.CR50_PREPVT)
- mismatch_last = self._check_original_image_state()
- if mismatch_last:
- raise error.TestError('Could not restore state: %s' %
- mismatch_last)
+ final_mismatch = self._check_running_image_and_board_id(state)
+ if final_mismatch:
+ raise error.TestError('Could not update cr50 image state: %s' %
+ final_mismatch)
+ logging.info('Successfully updated all device cr50 firmware state.')
- logging.info('Successfully restored the original cr50 state')
- if self._raise_error_on_mismatch:
+
+ def _restore_device_images_and_running_cr50_firmware(self):
+ """Restore the images on the device and the running cr50 image."""
+ if self._provision_update:
+ return
+ mismatch = self._check_original_image_state()
+ self._update_device_images_and_running_cr50_firmware(
+ self._original_image_state, self.get_saved_cr50_original_path(),
+ self._device_prod_image, self._device_prepvt_image)
+
+ if self._raise_error_on_mismatch and mismatch:
raise error.TestError('Unexpected state mismatch during '
- 'cleanup %s' % state_mismatch)
+ 'cleanup %s' % mismatch)
def _restore_ccd_settings(self):
@@ -628,7 +730,7 @@
dev signed images completely clears the CCD state.
"""
try:
- self._restore_device_images_and_running_cr50_image()
+ self._restore_device_images_and_running_cr50_firmware()
except Exception as e:
logging.warning('Issue restoring Cr50 image: %s', str(e))
raise
diff --git a/server/cros/faft/fingerprint_test.py b/server/cros/faft/fingerprint_test.py
index da45c16..cecb7b8 100644
--- a/server/cros/faft/fingerprint_test.py
+++ b/server/cros/faft/fingerprint_test.py
@@ -107,6 +107,12 @@
_FIRMWARE_VERSION_RW_VERSION: 'nocturne_fp_v2.2.191-1d529566e',
_FIRMWARE_VERSION_KEY_ID: '6f38c866182bd9bf7a4462c06ac04fa6a0074351',
},
+ 'nocturne_fp_v2.0.3266-99b5e2c98.bin': {
+ _FIRMWARE_VERSION_SHA256SUM: '73d822071518cf1b6e705d9c5903c2bcf37bae536784b275b96d916c44d3b6b7',
+ _FIRMWARE_VERSION_RO_VERSION: 'nocturne_fp_v2.2.64-58cf5974e',
+ _FIRMWARE_VERSION_RW_VERSION: 'nocturne_fp_v2.0.3266-99b5e2c98',
+ _FIRMWARE_VERSION_KEY_ID: '6f38c866182bd9bf7a4462c06ac04fa6a0074351',
+ },
},
_FP_BOARD_NAME_NAMI: {
'nami_fp_v2.2.144-7a08e07eb.bin': {
@@ -121,6 +127,12 @@
_FIRMWARE_VERSION_RW_VERSION: 'nami_fp_v2.2.191-1d529566e',
_FIRMWARE_VERSION_KEY_ID: '35486c0090ca390408f1fbbf2a182966084fe2f8',
},
+ 'nami_fp_v2.0.3266-99b5e2c98.bin': {
+ _FIRMWARE_VERSION_SHA256SUM: '115bca7045428ce6639b41cc0fdc13d1ca414f6e76842e805a9fbb798a9cd7ad',
+ _FIRMWARE_VERSION_RO_VERSION: 'nami_fp_v2.2.144-7a08e07eb',
+ _FIRMWARE_VERSION_RW_VERSION: 'nami_fp_v2.0.3266-99b5e2c98',
+ _FIRMWARE_VERSION_KEY_ID: '35486c0090ca390408f1fbbf2a182966084fe2f8',
+ },
},
_FP_BOARD_NAME_DARTMONKEY: {
'dartmonkey_v2.0.2887-311310808.bin': {
@@ -128,6 +140,12 @@
_FIRMWARE_VERSION_RO_VERSION: 'dartmonkey_v2.0.2887-311310808',
_FIRMWARE_VERSION_RW_VERSION: 'dartmonkey_v2.0.2887-311310808',
_FIRMWARE_VERSION_KEY_ID: '257a0aa3ac9e81aa4bc3aabdb6d3d079117c5799',
+ },
+ 'dartmonkey_v2.0.3266-99b5e2c98.bin': {
+ _FIRMWARE_VERSION_SHA256SUM: 'ac1c74b5d2676923f041ee1a27bf5b9892fab1d4f82fe924550a9b55917606ae',
+ _FIRMWARE_VERSION_RO_VERSION: 'dartmonkey_v2.0.2887-311310808',
+ _FIRMWARE_VERSION_RW_VERSION: 'dartmonkey_v2.0.3266-99b5e2c98',
+ _FIRMWARE_VERSION_KEY_ID: '257a0aa3ac9e81aa4bc3aabdb6d3d079117c5799',
}
}
}
diff --git a/server/cros/network/attenuator_hosts.py b/server/cros/network/attenuator_hosts.py
index cf70d54..7d1cdf5 100644
--- a/server/cros/network/attenuator_hosts.py
+++ b/server/cros/network/attenuator_hosts.py
@@ -13,6 +13,36 @@
1: {2437: 56, 5220: 56, 5765: 56},
2: {2437: 53, 5220: 59, 5765: 59},
3: {2437: 57, 5220: 56, 5765: 56}},
+ 'chromeos1-dev-host15-attenuator': {
+ 0: {2437: 52, 5220: 57, 5765: 57},
+ 1: {2437: 56, 5220: 55, 5765: 59},
+ 2: {2437: 52, 5220: 57, 5765: 57},
+ 3: {2437: 56, 5220: 55, 5765: 60}},
+ 'chromeos1-dev-host16-attenuator': {
+ 0: {2437: 52, 5220: 58, 5765: 62},
+ 1: {2437: 55, 5220: 55, 5765: 56},
+ 2: {2437: 52, 5220: 58, 5765: 61},
+ 3: {2437: 55, 5220: 55, 5765: 56}},
+ 'chromeos1-dev-host17-attenuator': {
+ 0: {2437: 52, 5220: 58, 5765: 57},
+ 1: {2437: 56, 5220: 55, 5765: 59},
+ 2: {2437: 52, 5220: 57, 5765: 58},
+ 3: {2437: 56, 5220: 55, 5765: 59}},
+ 'chromeos1-dev-host18-attenuator': {
+ 0: {2437: 52, 5220: 57, 5765: 57},
+ 1: {2437: 56, 5220: 56, 5765: 59},
+ 2: {2437: 52, 5220: 57, 5765: 57},
+ 3: {2437: 56, 5220: 56, 5765: 59}},
+ 'chromeos1-dev-host19-attenuator': {
+ 0: {2437: 52, 5220: 57, 5765: 60},
+ 1: {2437: 55, 5220: 54, 5765: 56},
+ 2: {2437: 52, 5220: 57, 5765: 60},
+ 3: {2437: 55, 5220: 54, 5765: 55}},
+ 'chromeos1-dev-host20-attenuator': {
+ 0: {2437: 53, 5220: 57, 5765: 62},
+ 1: {2437: 57, 5220: 55, 5765: 55},
+ 2: {2437: 53, 5220: 57, 5765: 61},
+ 3: {2437: 57, 5220: 55, 5765: 55}},
'chromeos1-test-host2-attenuator': {
0: {2437: 53, 5220: 59, 5765: 58},
1: {2437: 57, 5220: 57, 5765: 59},
diff --git a/server/cros/network/hostap_config.py b/server/cros/network/hostap_config.py
index a27314b..9294493 100644
--- a/server/cros/network/hostap_config.py
+++ b/server/cros/network/hostap_config.py
@@ -391,14 +391,19 @@
@property
def _ht_mode(self):
- """@return string one of (None, HT20, HT40+, HT40-)"""
- if not self._is_11n:
- return None
- if self._ht40_plus_allowed:
- return self.HT_NAMES[self.HT_CHANNEL_WIDTH_40_PLUS]
- if self._ht40_minus_allowed:
- return self.HT_NAMES[self.HT_CHANNEL_WIDTH_40_MINUS]
- return self.HT_NAMES[self.HT_CHANNEL_WIDTH_20]
+ """
+ @return object one of ( None,
+ HT_CHANNEL_WIDTH_40_PLUS,
+ HT_CHANNEL_WIDTH_40_MINUS,
+ HT_CHANNEL_WIDTH_20)
+ """
+ if self._is_11n or self.is_11ac:
+ if self._ht40_plus_allowed:
+ return self.HT_CHANNEL_WIDTH_40_PLUS
+ if self._ht40_minus_allowed:
+ return self.HT_CHANNEL_WIDTH_40_MINUS
+ return self.HT_CHANNEL_WIDTH_20
+ return None
@property
@@ -425,6 +430,8 @@
return packet_capturer.WIDTH_HT40_PLUS
if ht_mode == self.HT_CHANNEL_WIDTH_40_MINUS:
return packet_capturer.WIDTH_HT40_MINUS
+ if ht_mode == self.HT_CHANNEL_WIDTH_20:
+ return packet_capturer.WIDTH_HT20
if self.vht_channel_width == self.VHT_CHANNEL_WIDTH_80:
return packet_capturer.WIDTH_VHT80
@@ -453,7 +460,7 @@
ht_mode = self._ht_mode
if ht_mode:
- return ht_mode
+ return self.HT_NAMES[ht_mode]
return '11' + self._hw_mode.upper()
diff --git a/server/cros/servo/servo.py b/server/cros/servo/servo.py
index 36819db..07d7ee6 100644
--- a/server/cros/servo/servo.py
+++ b/server/cros/servo/servo.py
@@ -17,6 +17,7 @@
from autotest_lib.client.common_lib import lsbrelease_utils
from autotest_lib.server import utils as server_utils
from autotest_lib.server.cros.servo import firmware_programmer
+from autotest_lib.server.cros.faft.utils.config import Config as FAFTConfig
# Time to wait when probing for a usb device, it takes on avg 17 seconds
# to do a full probe.
@@ -52,13 +53,19 @@
if not os.path.exists(dest_dir):
os.mkdir(dest_dir)
- # Try to extract image candidates from tarball
+ # Generate a list of all tarball files
+ tarball_files = server_utils.system_output(
+ ('tar tf %s' % tarball), timeout=120, ignore_status=True).splitlines()
+
+ # Check if image candidates are in the list of tarball files
for image in image_candidates:
- status = server_utils.system(
+ if image in tarball_files:
+ # Extract and return the first image candidate found
+ status = server_utils.system(
('tar xf %s -C %s %s' % (tarball, dest_dir, image)),
- timeout=60, ignore_status=True)
- if status == 0:
- return image
+ timeout=120, ignore_status=True)
+ if status == 0:
+ return image
return None
@@ -1203,6 +1210,13 @@
@return: Path to extracted EC image.
"""
+
+ # Ignore extracting EC image and re-programming if not a Chrome EC
+ chrome_ec = FAFTConfig(board).chrome_ec
+ if not chrome_ec:
+ logging.info('Not a Chrome EC, ignore re-programming it')
+ return None
+
# Best effort; try to retrieve the EC board from the version as
# reported by the EC.
ec_board = None
@@ -1224,12 +1238,12 @@
ec_image = _extract_image_from_tarball(tarball_path, dest_dir,
ec_image_candidates)
- # Check if EC image was found and return path or None
+ # Check if EC image was found and return path or raise error
if ec_image:
return os.path.join(dest_dir, ec_image)
else:
- logging.info('Not a Chrome EC, ignore re-programming it')
- return None
+            raise error.TestError('Failed to extract EC image from %s' %
+                                  tarball_path)
def extract_bios_image(self, board, model, tarball_path):
diff --git a/server/cros/tradefed/generate_controlfiles_GTS.py b/server/cros/tradefed/generate_controlfiles_GTS.py
index 2d6f501..a8fbf7f 100755
--- a/server/cros/tradefed/generate_controlfiles_GTS.py
+++ b/server/cros/tradefed/generate_controlfiles_GTS.py
@@ -112,7 +112,8 @@
# partner moblabs.
CONFIG['PUBLIC_MODULE_RETRY_COUNT'] = {
_ALL: 2,
- 'GtsMediaTestCases': 5 # TODO(b/140841434)
+ 'GtsMediaTestCases': 5, # TODO(b/140841434)
+ 'GtsYouTubeTestCases': 5, # TODO(b/149376356)
}
CONFIG['PUBLIC_OVERRIDE_TEST_PRIORITY'] = {
diff --git a/server/cros/tradefed/tradefed_prerequisite.py b/server/cros/tradefed/tradefed_prerequisite.py
index 0da8c66..5364c55 100644
--- a/server/cros/tradefed/tradefed_prerequisite.py
+++ b/server/cros/tradefed/tradefed_prerequisite.py
@@ -15,7 +15,7 @@
# lines = output.splitlines()
# if len(lines) < 2 or not lines[0].startswith('Devices:'):
# return False, '%s: Bluetooth device is missing.'\
- # 'Stdout of the command "hcitool dev1"'\
+ # 'Stdout of the command "hcitool dev"'\
# 'on host %s was %s' % (_ERROR_PREFIX, host, output)
return True, ''
@@ -24,11 +24,11 @@
"""Check that region is set to "us".
"""
for host in hosts:
- output = host.run('vpd -g region').stdout
- if output != 'us':
- return False, '%s: Region is not "us".'\
- 'Stdout of the command "vpd -l'\
- '| grep region" on host %s was %s'\
+ output = host.run('vpd -g region', ignore_status=True).stdout
+ if output not in ['us', '']:
+ return False, '%s: Region is not "us" or empty. '\
+                          'STDOUT of the command "vpd -g '\
+                          'region" on host %s was %s'\
% (_ERROR_PREFIX, host, output)
return True, ''
diff --git a/server/cros/update_engine/omaha_devserver.py b/server/cros/update_engine/omaha_devserver.py
index 48c3e41..41169c3 100644
--- a/server/cros/update_engine/omaha_devserver.py
+++ b/server/cros/update_engine/omaha_devserver.py
@@ -48,20 +48,17 @@
_DEVSERVER_TIMELIMIT_SECONDS = 12 * 60 * 60
- def __init__(self, omaha_host, payload_location, max_updates=1,
- critical_update=True, moblab=False):
+ def __init__(self, omaha_host, payload_location, critical_update=True,
+ moblab=False):
"""Starts a private devserver instance, operating at Omaha capacity.
@param omaha_host: host address where the devserver is spawned.
@param payload_location: partial path from static dir to payload.
- @param max_updates: int number of updates this devserver will handle.
- This is passed to src/platform/dev/devserver.py.
@param critical_update: Whether to set a deadline in responses.
@param moblab: True if we are running on moblab.
"""
self._critical_update = critical_update
- self._max_updates = max_updates
self._omaha_host = omaha_host
self._devserver_pid = 0
self._devserver_port = 0 # Determined later from devserver portfile.
@@ -224,7 +221,6 @@
'--pidfile=%s' % self._devserver_pidfile,
'--portfile=%s' % self._devserver_portfile,
'--logfile=%s' % self._devserver_logfile,
- '--max_updates=%s' % self._max_updates,
'--host_log',
'--static_dir=%s' % self._devserver_static_dir,
'--payload=%s' % os.path.join(self._devserver_static_dir,
diff --git a/server/cros/update_engine/update_engine_test.py b/server/cros/update_engine/update_engine_test.py
index 8200665..23feef9 100644
--- a/server/cros/update_engine/update_engine_test.py
+++ b/server/cros/update_engine/update_engine_test.py
@@ -651,8 +651,8 @@
def get_update_url_for_test(self, job_repo_url, full_payload=True,
- critical_update=False, max_updates=1,
- public=False, moblab=False):
+ critical_update=False, public=False,
+ moblab=False):
"""
Get the correct update URL for autoupdate tests to use.
@@ -672,9 +672,6 @@
@param job_repo_url: string url containing the current build.
@param full_payload: bool whether we want a full payload.
@param critical_update: bool whether we need a critical update.
- @param max_updates: int number of updates the test will perform. This
- is passed to src/platform/dev/devserver.py if we
- create our own deverver.
@param public: url needs to be publicly accessible.
@param moblab: True if we are running on moblab.
@@ -726,7 +723,7 @@
# We need to start our own devserver instance on the lab devserver
# for the rest of the test scenarios.
self._omaha_devserver = omaha_devserver.OmahaDevserver(
- lab_devserver.hostname, payload_location, max_updates=max_updates,
+ lab_devserver.hostname, payload_location,
critical_update=critical_update, moblab=moblab)
self._omaha_devserver.start_devserver()
diff --git a/server/hosts/base_label.py b/server/hosts/base_label.py
index 3cfe7f6..f95c220 100644
--- a/server/hosts/base_label.py
+++ b/server/hosts/base_label.py
@@ -102,6 +102,25 @@
return prefix_labels, full_labels
+ def update_for_task(self, task_name):
+ """
+ This method helps to check which labels need to be updated.
+ State config labels are updated only for repair task.
+ Lab config labels are updated only for deploy task.
+        When no task name is given, all labels are updated.
+
+ It is the responsibility of the subclass to override this method
+ to differentiate itself as a state config label or a lab config label
+ and return the appropriate boolean value.
+
+ If the subclass doesn't override this method then that label will
+ always be updated for any type of task.
+
+ @returns True if labels should be updated for the task with given name
+ """
+ return True
+
+
class StringLabel(BaseLabel):
"""
This class represents a string label that is dynamically generated.
@@ -174,12 +193,14 @@
class LabelRetriever(object):
"""This class will assist in retrieving/updating the host labels."""
- def _populate_known_labels(self, label_list):
+ def _populate_known_labels(self, label_list, task_name):
"""Create a list of known labels that is created through this class."""
for label_instance in label_list:
- prefixed_labels, full_labels = label_instance.get_all_labels()
- self.label_prefix_names.update(prefixed_labels)
- self.label_full_names.update(full_labels)
+ # populate only the labels that need to be updated for this task.
+ if label_instance.update_for_task(task_name):
+ prefixed_labels, full_labels = label_instance.get_all_labels()
+ self.label_prefix_names.update(prefixed_labels)
+ self.label_full_names.update(full_labels)
def __init__(self, label_list):
@@ -207,6 +228,29 @@
return labels
+ def get_labels_for_update(self, host, task_name):
+ """
+ Retrieve the labels for the host which needs to be updated.
+
+ @param host: The host to get the labels for updating.
+ @param task_name: task name(repair/deploy) for the operation.
+
+ @returns labels to be updated
+ """
+ labels = []
+ for label in self._labels:
+ try:
+ # get only the labels which need to be updated for this task.
+ if label.update_for_task(task_name):
+ logging.info('checking label update %s',
+ label.__class__.__name__)
+ labels.extend(label.get(host))
+ except Exception:
+ logging.exception('error getting label %s.',
+ label.__class__.__name__)
+ return labels
+
+
def _is_known_label(self, label):
"""
Checks if the label is a label known to the label detection framework.
@@ -253,7 +297,7 @@
host.host_info_store.commit(new_info)
- def update_labels(self, host, keep_pool=False):
+ def update_labels(self, host, task_name='', keep_pool=False):
"""
Retrieve the labels from the host and update if needed.
@@ -261,12 +305,12 @@
"""
# If we haven't yet grabbed our list of known labels, do so now.
if not self.label_full_names and not self.label_prefix_names:
- self._populate_known_labels(self._labels)
+ self._populate_known_labels(self._labels, task_name)
# Label detection hits the DUT so it can be slow. Do it before reading
# old labels from HostInfoStore to minimize the time between read and
# commit of the HostInfo.
- new_labels = self.get_labels(host)
+ new_labels = self.get_labels_for_update(host, task_name)
old_info = host.host_info_store.get()
self._carry_over_unknown_labels(old_info.labels, new_labels)
new_info = host_info.HostInfo(
diff --git a/server/hosts/base_label_unittest.py b/server/hosts/base_label_unittest.py
index 975b792..2a6db21 100755
--- a/server/hosts/base_label_unittest.py
+++ b/server/hosts/base_label_unittest.py
@@ -3,7 +3,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import mock
import unittest
import common
@@ -108,6 +107,9 @@
self.assertEqual(prefix_tbl, set())
self.assertEqual(prefix_tbls, set())
+ def test_update_for_task(self):
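+        """update_for_task should default to True for any task name."""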
+ self.assertTrue(self.test_base_label.update_for_task(''))
+
class StringPrefixLabelUnittests(unittest.TestCase):
"""Unittest for testing base_label.StringPrefixLabel."""
@@ -137,7 +139,7 @@
def setUp(self):
label_list = [TestStringPrefixLabel(), TestBaseLabel()]
self.retriever = base_label.LabelRetriever(label_list)
- self.retriever._populate_known_labels(label_list)
+ self.retriever._populate_known_labels(label_list, '')
def test_populate_known_labels(self):
diff --git a/server/hosts/cros_host.py b/server/hosts/cros_host.py
index 543453b..83d348c 100644
--- a/server/hosts/cros_host.py
+++ b/server/hosts/cros_host.py
@@ -662,6 +662,15 @@
@staticmethod
def get_version_from_image(image, version_regex):
+ """Get version string from binary image using regular expression.
+
+ @param image: Binary image to search
+ @param version_regex: Regular expression to search for
+
+ @return Version string
+
+ @raises TestFail if no version string is found in image
+ """
with open(image, 'rb') as f:
image_data = f.read()
match = re.findall(version_regex, image_data)
@@ -805,9 +814,10 @@
if ec_image:
logging.info('Checking EC firmware version.')
dest_ec_version = self.get_ec_version()
- ec_regex = self._EC_REGEX % model
+ ec_version_prefix = dest_ec_version.split('_', 1)[0]
+ ec_regex = self._EC_REGEX % ec_version_prefix
image_ec_version = self.get_version_from_image(ec_image,
- ec_regex)
+ ec_regex)
if dest_ec_version != image_ec_version:
raise error.TestFail(
'Failed to update EC RO, version %s (expected %s)' %
@@ -903,6 +913,7 @@
self._servo_host = host
if self._servo_host is not None:
self.servo = self._servo_host.get_servo()
+ self._update_servo_labels()
else:
self.servo = None
@@ -922,8 +933,23 @@
if not self._servo_host:
raise error.AutoservError('No servo host for %s.' %
self.hostname)
- self._servo_host.repair()
- self.servo = self._servo_host.get_servo()
+ try:
+ self._servo_host.repair()
+ except:
+ raise
+ finally:
+ self.set_servo_host(self._servo_host)
+
+
+    def _update_servo_labels(self):
+        """Set the servo_state label in the DUT host_info store."""
+ if self._servo_host:
+ host_info = self.host_info_store.get()
+
+ servo_state = self._servo_host.get_servo_state()
+ host_info.set_version_label(servo_host.SERVO_STATE_LABEL_PREFIX, servo_state)
+
+ self.host_info_store.commit(host_info)
def repair(self):
diff --git a/server/hosts/cros_host_unittest.py b/server/hosts/cros_host_unittest.py
index 26b85b4..9dd4bc9 100755
--- a/server/hosts/cros_host_unittest.py
+++ b/server/hosts/cros_host_unittest.py
@@ -1,12 +1,14 @@
#!/usr/bin/python2
# pylint: disable=missing-docstring
+import mock
import unittest
import common
from autotest_lib.server.hosts import cros_host
from autotest_lib.server.hosts import servo_host
+from autotest_lib.server.hosts import host_info
CROSSYSTEM_RESULT = '''
fwb_tries = 0 # Fake comment
@@ -26,6 +28,9 @@
CHROMEOS_RELEASE_UNIBUILD=1
'''
+SERVO_STATE_PREFIX = servo_host.SERVO_STATE_LABEL_PREFIX
+
+
class MockCmd(object):
"""Simple mock command with base command and results"""
@@ -104,6 +109,71 @@
self.assertEqual(got, {servo_host.SERVO_HOST_ATTR: 'host'})
+class ServoStateLabelTestCase(unittest.TestCase):
+    """Tests that servo_state is set as a label in host_info_store."""
+
+ def create_host(self):
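+        """Create a MockHost with a mocked servo host attached."""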
+ host = MockHost()
+ host.servo = None
+ host._servo_host = mock.Mock()
+ host._servo_host.get_servo.return_value = 'Not Empty'
+ host._servo_host.get_servo_state.return_value = 'SOME_STATE'
+ host.host_info_store = host_info.InMemoryHostInfoStore()
+ self.assertEqual(host.host_info_store.get().get_label_value(SERVO_STATE_PREFIX), '')
+ return host
+
+ def test_do_not_update_label_when_servo_host_is_not_inited(self):
+ host = self.create_host()
+ host._servo_host = None
+
+ host._update_servo_labels()
+ self.assertEqual(host.host_info_store.get().get_label_value(SERVO_STATE_PREFIX), '')
+
+ def test_do_not_update_label_when_servo_host_is_inited(self):
+ host = self.create_host()
+
+ host._update_servo_labels()
+ host._servo_host.get_servo_state.assert_called()
+ self.assertEqual(host.host_info_store.get().get_label_value(SERVO_STATE_PREFIX), 'SOME_STATE')
+
+ def test_repair_servo__update_servo_labels_after_repair_when_repair_is_fail(self):
+ host = self.create_host()
+ host._servo_host.repair.side_effect = Exception('Something bad')
+
+ try:
+ host.repair_servo()
+ self.assertEqual("Exception is", 'expecting to raise')
+ except:
+ pass
+ host._servo_host.get_servo_state.assert_called()
+ self.assertEqual(host.host_info_store.get().get_label_value(SERVO_STATE_PREFIX), 'SOME_STATE')
+
+ def test_repair_servo__update_servo_labels_after_repair_when_repair_is_not_fail(self):
+ host = self.create_host()
+ try:
+ host.repair_servo()
+ except:
+ self.assertEqual("Exception is not", 'expected')
+ pass
+ host._servo_host.get_servo_state.assert_called()
+ self.assertEqual(host.host_info_store.get().get_label_value(SERVO_STATE_PREFIX), 'SOME_STATE')
+
+ def test_set_servo_host_update_servo_state_when_host_exist(self):
+ host = self.create_host()
+ host._servo_host = mock.Mock()
+ host._servo_host.get_servo.return_value = 'Not Empty'
+ host._servo_host.get_servo_state.return_value = 'SOME_STATE'
+ self.assertEqual(host.host_info_store.get().get_label_value(SERVO_STATE_PREFIX), '')
+
+ try:
+ host.repair_servo()
+ except:
+ self.assertEqual("Exception is not", 'expected')
+ pass
+ host._servo_host.get_servo_state.assert_called()
+ self.assertEqual(host.host_info_store.get().get_label_value(SERVO_STATE_PREFIX), 'SOME_STATE')
+
+
if __name__ == "__main__":
unittest.main()
diff --git a/server/hosts/cros_label.py b/server/hosts/cros_label.py
index 7cf31a2..17038db 100644
--- a/server/hosts/cros_label.py
+++ b/server/hosts/cros_label.py
@@ -27,6 +27,10 @@
# fallback values if we can't contact the HWID server
HWID_LABELS_FALLBACK = ['sku', 'phase', 'touchscreen', 'touchpad', 'variant', 'stylus']
+# Repair and Deploy taskName
+REPAIR_TASK_NAME = 'repair'
+DEPLOY_TASK_NAME = 'deploy'
+
def _parse_lsb_output(host):
"""Parses the LSB output and returns key data points for labeling.
@@ -116,6 +120,11 @@
return []
+ def update_for_task(self, task_name):
+        # This label is stored in the lab config, so it is updated only
+        # for deploy tasks or when no task name is given.
+ return task_name in (DEPLOY_TASK_NAME, '')
+
class BrandCodeLabel(base_label.StringPrefixLabel):
"""Determine the correct brand_code (aka RLZ-code) for the device."""
@@ -305,6 +314,10 @@
pass
return has_chameleon
+ def update_for_task(self, task_name):
+        # This label is stored in the state config, so it is updated only
+        # for repair tasks or when no task name is given.
+ return task_name in (REPAIR_TASK_NAME, '')
class ChameleonConnectionLabel(base_label.StringPrefixLabel):
@@ -319,6 +332,11 @@
def generate_labels(self, host):
return [chameleon.get_label() for chameleon in host.chameleon_list]
+ def update_for_task(self, task_name):
+        # This label is stored in the lab config, so it is updated only
+        # for deploy tasks or when no task name is given.
+ return task_name in (DEPLOY_TASK_NAME, '')
+
class ChameleonPeripheralsLabel(base_label.StringPrefixLabel):
"""Return the Chameleon peripherals labels.
@@ -386,7 +404,10 @@
logging.info('Bluetooth labels are %s', labels_list)
return labels_list
-
+ def update_for_task(self, task_name):
+        # This label is stored in the lab config, so it is updated only
+        # for deploy tasks or when no task name is given.
+ return task_name in (DEPLOY_TASK_NAME, '')
class AudioLoopbackDongleLabel(base_label.BaseLabel):
@@ -425,6 +446,11 @@
return True
return False
+ def update_for_task(self, task_name):
+        # This label is stored in the state config, so it is updated only
+        # for repair tasks or when no task name is given.
+ return task_name in (REPAIR_TASK_NAME, '')
+
class PowerSupplyLabel(base_label.StringPrefixLabel):
"""
@@ -611,6 +637,11 @@
return (servo_host_hostname is not None
and servo_host.servo_host_is_up(servo_host_hostname))
+ def update_for_task(self, task_name):
+        # This label is stored in the state config, so it is updated only
+        # for repair tasks or when no task name is given.
+ return task_name in (REPAIR_TASK_NAME, '')
+
class ArcLabel(base_label.BaseLabel):
"""Label indicates if host has ARC support."""
@@ -878,40 +909,16 @@
CROS_LABELS = [
- AccelsLabel(),
- ArcLabel(),
- AudioLoopbackDongleLabel(),
- BluetoothLabel(),
- BoardLabel(),
- ModelLabel(),
- ChameleonConnectionLabel(),
- ChameleonLabel(),
- ChameleonPeripheralsLabel(),
+ AudioLoopbackDongleLabel(), #STATECONFIG
+ ChameleonConnectionLabel(), #LABCONFIG
+ ChameleonLabel(), #STATECONFIG
+ ChameleonPeripheralsLabel(), #LABCONFIG
common_label.OSLabel(),
- Cr50Label(),
- Cr50ROKeyidLabel(),
- Cr50RWKeyidLabel(),
- Cr50ROVersionLabel(),
- Cr50RWVersionLabel(),
- CtsArchLabel(),
- DetachableBaseLabel(),
- DeviceSkuLabel(),
- BrandCodeLabel(),
- ECLabel(),
- FingerprintLabel(),
+ DeviceSkuLabel(), #LABCONFIG
HWIDLabel(),
- InternalDisplayLabel(),
- LucidSleepLabel(),
- PowerSupplyLabel(),
- ReferenceDesignLabel(),
- ServoLabel(),
- StorageLabel(),
- VideoGlitchLabel(),
+ ServoLabel(), #STATECONFIG
]
LABSTATION_LABELS = [
- BoardLabel(),
- ModelLabel(),
common_label.OSLabel(),
- PowerSupplyLabel(),
]
diff --git a/server/hosts/cros_label_unittest.py b/server/hosts/cros_label_unittest.py
index f837e3e..1cd9c99 100755
--- a/server/hosts/cros_label_unittest.py
+++ b/server/hosts/cros_label_unittest.py
@@ -20,6 +20,10 @@
from autotest_lib.server.hosts.cros_label import Cr50RWVersionLabel
from autotest_lib.server.hosts.cros_label import DeviceSkuLabel
from autotest_lib.server.hosts.cros_label import ModelLabel
+from autotest_lib.server.hosts.cros_label import AudioLoopbackDongleLabel
+from autotest_lib.server.hosts.cros_label import ChameleonConnectionLabel
+from autotest_lib.server.hosts.cros_label import ChameleonLabel
+from autotest_lib.server.hosts.cros_label import ChameleonPeripheralsLabel
from autotest_lib.server.hosts.cros_label import ServoLabel
from autotest_lib.server.hosts import host_info
@@ -239,6 +243,11 @@
host = MockHost(['device-sku:48'])
self.assertEqual(DeviceSkuLabel().generate_labels(host), ['48'])
+ def test_update_for_task(self):
+ self.assertTrue(DeviceSkuLabel().update_for_task(''))
+ self.assertFalse(DeviceSkuLabel().update_for_task('repair'))
+ self.assertTrue(DeviceSkuLabel().update_for_task('deploy'))
+
class BrandCodeLabelTests(unittest.TestCase):
"""Unit tests for DeviceSkuLabel"""
@@ -440,6 +449,39 @@
self.assertEqual(prefix_labels, set(['servo_state']))
self.assertEqual(full_labels, set(['servo']))
+ def test_update_for_task(self):
+ self.assertTrue(ServoLabel().update_for_task(''))
+ self.assertTrue(ServoLabel().update_for_task('repair'))
+ self.assertFalse(ServoLabel().update_for_task('deploy'))
+
+
+class AudioLoopbackDongleLabelTests(unittest.TestCase):
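+    """Unit tests for AudioLoopbackDongleLabel.update_for_task."""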
+ def test_update_for_task(self):
+ self.assertTrue(AudioLoopbackDongleLabel().update_for_task(''))
+ self.assertTrue(AudioLoopbackDongleLabel().update_for_task('repair'))
+ self.assertFalse(AudioLoopbackDongleLabel().update_for_task('deploy'))
+
+
+class ChameleonConnectionLabelTests(unittest.TestCase):
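+    """Unit tests for ChameleonConnectionLabel.update_for_task."""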
+ def test_update_for_task(self):
+ self.assertTrue(ChameleonConnectionLabel().update_for_task(''))
+ self.assertFalse(ChameleonConnectionLabel().update_for_task('repair'))
+ self.assertTrue(ChameleonConnectionLabel().update_for_task('deploy'))
+
+
+class ChameleonLabelTests(unittest.TestCase):
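+    """Unit tests for ChameleonLabel.update_for_task."""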
+ def test_update_for_task(self):
+ self.assertTrue(ChameleonLabel().update_for_task(''))
+ self.assertTrue(ChameleonLabel().update_for_task('repair'))
+ self.assertFalse(ChameleonLabel().update_for_task('deploy'))
+
+
+class ChameleonPeripheralsLabelTests(unittest.TestCase):
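+    """Unit tests for ChameleonPeripheralsLabel.update_for_task."""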
+ def test_update_for_task(self):
+ self.assertTrue(ChameleonPeripheralsLabel().update_for_task(''))
+ self.assertFalse(ChameleonPeripheralsLabel().update_for_task('repair'))
+ self.assertTrue(ChameleonPeripheralsLabel().update_for_task('deploy'))
+
if __name__ == '__main__':
unittest.main()
diff --git a/server/hosts/servo_host.py b/server/hosts/servo_host.py
index 87c0eb4..f479215 100644
--- a/server/hosts/servo_host.py
+++ b/server/hosts/servo_host.py
@@ -54,6 +54,10 @@
'SCHEDULER', 'drone_installation_directory',
default='/usr/local/autotest')
+SERVO_STATE_LABEL_PREFIX = 'servo_state'
+SERVO_STATE_WORKING = 'WORKING'
+SERVO_STATE_BROKEN = 'BROKEN'
+
class ServoHost(base_servohost.BaseServoHost):
"""Host class for a servo host(e.g. beaglebone, labstation)
@@ -70,6 +74,15 @@
# Ready test function
SERVO_READY_METHOD = 'get_version'
+ def _init_attributes(self):
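+        """Initialize servo-related attributes to their default values."""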
+ self._servo_state = None
+ self.servo_port = None
+ self.servo_board = None
+ self.servo_model = None
+ self.servo_serial = None
+ self._servo = None
+ self._servod_server_proxy = None
+
def _initialize(self, servo_host='localhost',
servo_port=DEFAULT_PORT, servo_board=None,
@@ -93,12 +106,11 @@
"""
super(ServoHost, self)._initialize(hostname=servo_host,
is_in_lab=is_in_lab, *args, **dargs)
+ self._init_attributes()
self.servo_port = int(servo_port)
self.servo_board = servo_board
self.servo_model = servo_model
self.servo_serial = servo_serial
- self._servo = None
- self._servod_server_proxy = None
# Path of the servo host lock file.
self._lock_file = (self.TEMP_FILE_DIR + str(self.servo_port)
@@ -194,7 +206,11 @@
self.record('INFO', None, None, message)
try:
self._repair_strategy.verify(self, silent)
+ self._servo_state = SERVO_STATE_WORKING
+ self.record('INFO', None, None, 'ServoHost verify set servo_state as WORKING')
except:
+ self._servo_state = SERVO_STATE_BROKEN
+ self.record('INFO', None, None, 'ServoHost verify set servo_state as BROKEN')
self.disconnect_servo()
self.stop_servod()
raise
@@ -210,11 +226,15 @@
self.record('INFO', None, None, message)
try:
self._repair_strategy.repair(self, silent)
+ self._servo_state = SERVO_STATE_WORKING
+ self.record('INFO', None, None, 'ServoHost repair set servo_state as WORKING')
# If target is a labstation then try to withdraw any existing
# reboot request created by this servo because it passed repair.
if self.is_labstation():
self.withdraw_reboot_request()
except:
+ self._servo_state = SERVO_STATE_BROKEN
+ self.record('INFO', None, None, 'ServoHost repair set servo_state as BROKEN')
self.disconnect_servo()
self.stop_servod()
raise
@@ -356,6 +376,10 @@
super(ServoHost, self).close()
+ def get_servo_state(self):
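+        """Return the servo state, or SERVO_STATE_BROKEN if it was never set."""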
+ return SERVO_STATE_BROKEN if self._servo_state is None else self._servo_state
+
+
def make_servo_hostname(dut_hostname):
"""Given a DUT's hostname, return the hostname of its servo.
diff --git a/server/hosts/servo_host_unittest.py b/server/hosts/servo_host_unittest.py
new file mode 100644
index 0000000..fa38ef3
--- /dev/null
+++ b/server/hosts/servo_host_unittest.py
@@ -0,0 +1,83 @@
+import mock
+import unittest
+
+import common
+
+from autotest_lib.server.hosts import servo_host
+
+
+class MockCmd(object):
+ """Simple mock command with base command and results"""
+
+ def __init__(self, cmd, exit_status, stdout):
+ self.cmd = cmd
+ self.stdout = stdout
+ self.exit_status = exit_status
+
+
+class MockHost(servo_host.ServoHost):
+ """Simple host for running mock'd host commands"""
+
+ def __init__(self, *args):
+ self._mock_cmds = {c.cmd: c for c in args}
+ self._init_attributes()
+ self.hostname = "some_hostname"
+
+ def run(self, command, **kwargs):
+ """Finds the matching result by command value"""
+ mock_cmd = self._mock_cmds[command]
+ file_out = kwargs.get('stdout_tee', None)
+ if file_out:
+ file_out.write(mock_cmd.stdout)
+ return mock_cmd
+
+
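+# Usage sketch for the mock classes above (the command string and stdout are
+# hypothetical):
+#   host = MockHost(MockCmd('some_command --flag', 0, 'expected stdout'))
+#   result = host.run('some_command --flag')
+#   # result.exit_status == 0 and result.stdout == 'expected stdout'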
+class ServoHostServoStateTestCase(unittest.TestCase):
+ """Tests to verify changing the servo_state"""
+ def test_return_broken_if_state_not_defined(self):
+ host = MockHost()
+ self.assertIsNotNone(host)
+ self.assertIsNone(host._servo_state)
+ self.assertIsNotNone(host.get_servo_state())
+ self.assertEqual(host.get_servo_state(), servo_host.SERVO_STATE_BROKEN)
+
+ def test_verify_set_state_broken_if_raised_error(self):
+ host = MockHost()
+ host._is_localhost = True
+ host._repair_strategy = mock.Mock()
+ host._repair_strategy.verify.side_effect = Exception('something_ex')
+ try:
+ host.verify(silent=True)
+ self.assertEqual("Should not be reached", 'expecting error')
+ except:
+ pass
+ self.assertEqual(host.get_servo_state(), servo_host.SERVO_STATE_BROKEN)
+
+ def test_verify_set_state_working_if_no_raised_error(self):
+ host = MockHost()
+ host._repair_strategy = mock.Mock()
+ host.verify(silent=True)
+ self.assertEqual(host.get_servo_state(), servo_host.SERVO_STATE_WORKING)
+
+ def test_repair_set_state_broken_if_raised_error(self):
+ host = MockHost()
+ host._is_localhost = True
+ host._repair_strategy = mock.Mock()
+ host._repair_strategy.repair.side_effect = Exception('something_ex')
+ try:
+ host.repair(silent=True)
+ self.assertEqual("Should not be reached", 'expecting error')
+ except:
+ pass
+ self.assertEqual(host.get_servo_state(), servo_host.SERVO_STATE_BROKEN)
+
+ def test_repair_set_state_working_if_no_raised_error(self):
+ host = MockHost()
+ host._is_labstation = False
+ host._repair_strategy = mock.Mock()
+ host.repair(silent=True)
+ self.assertEqual(host.get_servo_state(), servo_host.SERVO_STATE_WORKING)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/server/site_tests/autoupdate_ForcedOOBEUpdate/autoupdate_ForcedOOBEUpdate.py b/server/site_tests/autoupdate_ForcedOOBEUpdate/autoupdate_ForcedOOBEUpdate.py
index 43cb028..9c0013c 100644
--- a/server/site_tests/autoupdate_ForcedOOBEUpdate/autoupdate_ForcedOOBEUpdate.py
+++ b/server/site_tests/autoupdate_ForcedOOBEUpdate/autoupdate_ForcedOOBEUpdate.py
@@ -59,8 +59,7 @@
def run_once(self, full_payload=True, cellular=False,
- interrupt=None, max_updates=1, job_repo_url=None,
- moblab=False):
+ interrupt=None, job_repo_url=None, moblab=False):
"""
Runs a forced autoupdate during ChromeOS OOBE.
@@ -68,8 +67,6 @@
@param cellular: True to do the update over a cellular connection.
Requires that the DUT have a sim card slot.
@param interrupt: Type of interrupt to try: [reboot, network, suspend]
- @param max_updates: Used to tell the test how many times it is
- expected to ping its omaha server.
@param job_repo_url: Used for debugging locally. This is used to figure
out the current build and the devserver to use.
The test will read this from a host argument
@@ -87,7 +84,6 @@
full_payload=full_payload,
critical_update=True,
public=cellular,
- max_updates=max_updates,
moblab=moblab)
before = self._get_chromeos_version()
payload_info = None
@@ -125,6 +121,10 @@
raise error.TestFail('The update did not continue where it '
'left off after interruption.')
+ # We create a new lsb-release file with no_update=True so that no more
+ # actual updates will happen.
+ self._create_custom_lsb_release(update_url, no_update=True)
+
self._wait_for_oobe_update_to_complete()
if cellular:
diff --git a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.network.full b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.network.full
index ecdac55..1b39c6e 100644
--- a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.network.full
+++ b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.network.full
@@ -27,6 +27,6 @@
def run(machine):
host = hosts.create_host(machine)
job.run_test('autoupdate_ForcedOOBEUpdate', host=host, full_payload=True,
- interrupt='network', max_updates=1, **args_dict)
+ interrupt='network', **args_dict)
job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.network.full.moblab b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.network.full.moblab
index 23219c4..708ff6a 100644
--- a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.network.full.moblab
+++ b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.network.full.moblab
@@ -27,6 +27,6 @@
def run(machine):
host = hosts.create_host(machine)
job.run_test('autoupdate_ForcedOOBEUpdate', host=host, full_payload=True,
- interrupt='network', max_updates=1, moblab=True, **args_dict)
+ interrupt='network', moblab=True, **args_dict)
job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.reboot.full b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.reboot.full
index 77eb5f5..52a56a9 100644
--- a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.reboot.full
+++ b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.reboot.full
@@ -24,6 +24,6 @@
def run(machine):
host = hosts.create_host(machine)
job.run_test('autoupdate_ForcedOOBEUpdate', host=host, full_payload=True,
- interrupt='reboot', max_updates=2, **args_dict)
+ interrupt='reboot', **args_dict)
job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.reboot.full.moblab b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.reboot.full.moblab
index 675642b..b39e37f 100644
--- a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.reboot.full.moblab
+++ b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.reboot.full.moblab
@@ -24,6 +24,6 @@
def run(machine):
host = hosts.create_host(machine)
job.run_test('autoupdate_ForcedOOBEUpdate', host=host, full_payload=True,
- interrupt='reboot', max_updates=2, moblab=True, **args_dict)
+ interrupt='reboot', moblab=True, **args_dict)
job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.suspend.full b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.suspend.full
index 4a8c750..617fbcc 100644
--- a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.suspend.full
+++ b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.suspend.full
@@ -24,6 +24,6 @@
def run(machine):
host = hosts.create_host(machine)
job.run_test('autoupdate_ForcedOOBEUpdate', host=host, full_payload=True,
- interrupt='suspend', max_updates=1, **args_dict)
+ interrupt='suspend', **args_dict)
job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.suspend.full.moblab b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.suspend.full.moblab
index 0aa9fc6..f44fe90 100644
--- a/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.suspend.full.moblab
+++ b/server/site_tests/autoupdate_ForcedOOBEUpdate/control.interrupt.suspend.full.moblab
@@ -24,6 +24,6 @@
def run(machine):
host = hosts.create_host(machine)
job.run_test('autoupdate_ForcedOOBEUpdate', host=host, full_payload=True,
- interrupt='suspend', max_updates=1, moblab=True, **args_dict)
+ interrupt='suspend', moblab=True, **args_dict)
job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_Interruptions/autoupdate_Interruptions.py b/server/site_tests/autoupdate_Interruptions/autoupdate_Interruptions.py
index 83f4b1f..3b559ed 100644
--- a/server/site_tests/autoupdate_Interruptions/autoupdate_Interruptions.py
+++ b/server/site_tests/autoupdate_Interruptions/autoupdate_Interruptions.py
@@ -19,8 +19,7 @@
super(autoupdate_Interruptions, self).cleanup()
- def run_once(self, full_payload=True, interrupt=None, job_repo_url=None,
- max_updates=2):
+ def run_once(self, full_payload=True, interrupt=None, job_repo_url=None):
"""
Runs an update with interruptions from the user.
@@ -30,15 +29,12 @@
out the current build and the devserver to use.
The test will read this from a host argument
when run in the lab.
- @param max_updates: The number of update attempts the devserver should
- accept.
"""
update_url = self.get_update_url_for_test(job_repo_url,
full_payload=full_payload,
- critical_update=True,
- max_updates=max_updates)
+ critical_update=True)
chromeos_version = self._get_chromeos_version()
# Choose a random downloaded progress to interrupt the update.
@@ -82,7 +78,10 @@
self._host.reboot()
utils.poll_for_condition(self._get_update_engine_status,
desc='update engine to start')
- self._check_for_update(server=server, port=parsed_url.port)
+ # We check for an update with no_update=True so it doesn't start the
+ # update again.
+ self._check_for_update(server=server, port=parsed_url.port,
+ no_update=True)
# Verify that the update completed successfully by checking hostlog.
rootfs_hostlog, reboot_hostlog = self._create_hostlog_files()
diff --git a/server/site_tests/autoupdate_Interruptions/control.network.full b/server/site_tests/autoupdate_Interruptions/control.network.full
index 65bfdf0..d488310 100644
--- a/server/site_tests/autoupdate_Interruptions/control.network.full
+++ b/server/site_tests/autoupdate_Interruptions/control.network.full
@@ -23,6 +23,6 @@
def run(machine):
host = hosts.create_host(machine)
job.run_test('autoupdate_Interruptions', host=host, full_payload=True,
- interrupt='network', max_updates=1, **args_dict)
+ interrupt='network', **args_dict)
job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_Interruptions/control.reboot.full b/server/site_tests/autoupdate_Interruptions/control.reboot.full
index 50cac27..4675f34 100644
--- a/server/site_tests/autoupdate_Interruptions/control.reboot.full
+++ b/server/site_tests/autoupdate_Interruptions/control.reboot.full
@@ -23,6 +23,6 @@
def run(machine):
host = hosts.create_host(machine)
job.run_test('autoupdate_Interruptions', host=host, full_payload=True,
- interrupt='reboot', max_updates=2, **args_dict)
+ interrupt='reboot', **args_dict)
job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_Interruptions/control.suspend.full b/server/site_tests/autoupdate_Interruptions/control.suspend.full
index 45df4bb..680f9f8 100644
--- a/server/site_tests/autoupdate_Interruptions/control.suspend.full
+++ b/server/site_tests/autoupdate_Interruptions/control.suspend.full
@@ -23,6 +23,6 @@
def run(machine):
host = hosts.create_host(machine)
job.run_test('autoupdate_Interruptions', host=host, full_payload=True,
- interrupt='suspend', max_updates=1, **args_dict)
+ interrupt='suspend', **args_dict)
job.parallel_simple(run, machines)
diff --git a/server/site_tests/autoupdate_P2P/autoupdate_P2P.py b/server/site_tests/autoupdate_P2P/autoupdate_P2P.py
index 52c1a39..ab4a118 100644
--- a/server/site_tests/autoupdate_P2P/autoupdate_P2P.py
+++ b/server/site_tests/autoupdate_P2P/autoupdate_P2P.py
@@ -310,8 +310,7 @@
# P2P updates are very slow so we will only update with a delta payload.
update_url = self.get_update_url_for_test(job_repo_url,
full_payload=False,
- critical_update=False,
- max_updates=2)
+ critical_update=False)
# The first device just updates normally.
self._update_dut(self._hosts[0], update_url)
diff --git a/server/site_tests/bluetooth_AdapterSASanity/bluetooth_AdapterSASanity.py b/server/site_tests/bluetooth_AdapterSASanity/bluetooth_AdapterSASanity.py
index 235d036..83fd880 100644
--- a/server/site_tests/bluetooth_AdapterSASanity/bluetooth_AdapterSASanity.py
+++ b/server/site_tests/bluetooth_AdapterSASanity/bluetooth_AdapterSASanity.py
@@ -195,13 +195,21 @@
self.test_dbus_start_discovery_success()
self.test_dbus_start_discovery_fail_discovery_in_progress()
self.test_dbus_start_discovery_fail_power_off()
+
self.test_dbus_stop_discovery_success()
self.test_dbus_stop_discovery_fail_discovery_not_in_progress()
self.test_dbus_stop_discovery_fail_power_off()
+
self.test_dbus_pause_discovery_success()
self.test_dbus_pause_discovery_success_no_discovery_in_progress()
self.test_dbus_pause_discovery_fail_already_paused()
self.test_dbus_pause_discovery_fail_power_off()
+
+ self.test_dbus_unpause_discovery_success()
+ self.test_dbus_unpause_discovery_fail_without_pause()
+ self.test_dbus_unpause_discovery_fail_power_off()
+ self.test_dbus_unpause_discovery_fail_already_unpaused()
+
self.test_dbus_get_supported_capabilities_success()
self.test_dbus_get_supported_capabilities_success_power_off()
diff --git a/server/site_tests/bluetooth_AdapterSRSanity/bluetooth_AdapterSRSanity.py b/server/site_tests/bluetooth_AdapterSRSanity/bluetooth_AdapterSRSanity.py
new file mode 100644
index 0000000..3c88075
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterSRSanity/bluetooth_AdapterSRSanity.py
@@ -0,0 +1,415 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+""" Server-side bluetooth adapter tests that involve suspend/resume with peers
+
+paired and/or connected.
+
+Single chameleon tests:
+ - Reconnect on resume test
+ - Classic HID
+ - LE HID
+ - A2DP
+ - Wake from suspend test
+ - Classic HID
+ - LE HID
+ - A2DP shouldn't wake from suspend
+ - Suspend while discovering (discovering should pause and unpause)
+ - Suspend while advertising (advertising should pause and unpause)
+
+Multiple chameleon tests:
+ - Reconnect on resume test
+ - One classic HID, One LE HID
+ - Two classic HID
+ - Two LE HID
+ - Wake from suspend test
+ - Two classic HID
+ - Two classic LE
+"""
+
+from datetime import datetime, timedelta
+import logging
+import multiprocessing
+import threading
+import time
+
+from autotest_lib.server.cros.bluetooth import bluetooth_adapter_tests
+from autotest_lib.server.cros.bluetooth.bluetooth_adapter_quick_tests import \
+ BluetoothAdapterQuickTests
+
+test_wrapper = BluetoothAdapterQuickTests.quick_test_test_decorator
+batch_wrapper = BluetoothAdapterQuickTests.quick_test_batch_decorator
+test_retry_and_log = bluetooth_adapter_tests._test_retry_and_log
+
+SHORT_SUSPEND = 10
+MED_SUSPEND = 20
+LONG_SUSPEND = 30
+
+RESUME_DELTA = 5
+
+class bluetooth_AdapterSRSanity(
+ BluetoothAdapterQuickTests,
+ bluetooth_adapter_tests.BluetoothAdapterTests):
+ """Server side bluetooth adapter suspend resume test with peer."""
+
+ def _suspend_async(self,
+ suspend_time=SHORT_SUSPEND,
+ allow_early_resume=False):
+ """ Suspend asynchronously and return process for joining
+
+ @param suspend_time: how long to stay in suspend
+ @param allow_early_resume: whether an early wake-up is expected
+ @returns multiprocessing.Process object with suspend task
+ """
+
+ def _action_suspend():
+ self.host.suspend(
+ suspend_time=suspend_time,
+ allow_early_resume=allow_early_resume)
+
+ proc = multiprocessing.Process(target=_action_suspend)
+ proc.daemon = True
+ return proc
+
+ def _device_connect_async(self, device_type, device, adapter_address):
+ """ Connects peer device asynchronously with DUT.
+
+ This function uses a thread instead of a subprocess so that the
+ connection result is recorded for the test. Otherwise, the connection
+ would sometimes fail while the test itself still passed.
+
+ @param device_type: The device type (used to check if it's LE)
+ @param device: the meta device with the peer device
+ @param adapter_address: the address of the adapter
+
+ @returns threading.Thread object with device connect task
+ """
+
+ def _action_device_connect():
+ time.sleep(1)
+ if 'BLE' in device_type:
+ # LE reconnects by advertising (dut controller will create LE
+ # connection, not the peer device)
+ self.test_device_set_discoverable(device, True)
+ else:
+ # Classic requires peer to initiate a connection to wake up the
+ # dut
+ self.test_connection_by_device_only(device, adapter_address)
+
+ thread = threading.Thread(target=_action_device_connect)
+ return thread
+
+ @test_retry_and_log(False)
+ def suspend_and_wait_for_sleep(self, suspend):
+ """ Suspend the device and wait until it is sleeping.
+
+ @param suspend: Sub-process that does the actual suspend call.
+
+ @return True if host is asleep within a short timeout, False otherwise.
+ """
+ suspend.start()
+ try:
+ self.host.test_wait_for_sleep(sleep_timeout=SHORT_SUSPEND)
+ except:
+ suspend.join()
+ return False
+
+ return True
+
+ @test_retry_and_log(False)
+ def wait_for_resume(self, boot_id, suspend, resume_timeout=SHORT_SUSPEND):
+ """ Wait for device to resume from suspend.
+
+ @param boot_id: Current boot id
+ @param suspend: Sub-process that does the actual suspend call.
+ @param resume_timeout: Expect the device to resume within this timeout.
+
+ @raises an exception if the timeout triggers during resume
+
+ @return True if suspend sub-process completed without error.
+ """
+ success = True
+
+ # Sometimes it takes longer to resume from suspend; give some leeway
+ resume_timeout = resume_timeout + RESUME_DELTA
+ try:
+ start = datetime.now()
+ self.host.test_wait_for_resume(
+ boot_id, resume_timeout=resume_timeout)
+
+ delta = datetime.now() - start
+
+ # TODO(abhishekpandit) - Figure out why test_wait_for_resume isn't
+ # timing out
+ if delta > timedelta(seconds=resume_timeout):
+ success = False
+ except Exception as e:
+ success = False
+ logging.error("wait_for_resume: %s", e)
+ finally:
+ suspend.join()
+ self.results = {
+ "resume_success": success,
+ "suspend_result": suspend.exitcode == 0
+ }
+
+ return all(self.results.values())
+
+ def test_discover_and_pair(self, device):
+ """ Discovers and pairs given device. Automatically connects too."""
+ self.test_device_set_discoverable(device, True)
+ self.test_discover_device(device.address)
+ self.bluetooth_facade.stop_discovery()
+ self.test_pairing(device.address, device.pin, trusted=True)
+
+ def _test_keyboard_with_string(self, device):
+ self.test_keyboard_input_from_trace(device, "simple_text")
+
+ # ---------------------------------------------------------------
+ # Reconnect after suspend tests
+ # ---------------------------------------------------------------
+
+ def run_reconnect_device(self, devtuples):
+ """ Reconnects a device after suspend/resume.
+
+ @param devtuples: array of tuples consisting of the following
+ * device_type: MOUSE, BLE_MOUSE, etc.
+ * device: meta object for peer device
+ * device_test: Optional; test function to run with the
+ device (for example, mouse click)
+ """
+ boot_id = self.host.get_boot_id()
+ suspend = self._suspend_async()
+
+ try:
+ for _, device, device_test in devtuples:
+ self.test_discover_and_pair(device)
+ self.test_device_set_discoverable(device, False)
+ self.test_connection_by_adapter(device.address)
+
+ # Trigger suspend, wait for regular resume, verify we can reconnect
+ # and run device specific test
+ self.suspend_and_wait_for_sleep(suspend)
+ self.wait_for_resume(boot_id, suspend, resume_timeout=SHORT_SUSPEND)
+
+ for device_type, device, device_test in devtuples:
+ if 'BLE' in device_type:
+ # LE can't reconnect without advertising/discoverable
+ self.test_device_set_discoverable(device, True)
+
+ # Test that host sees connection complete
+ self.test_connection_by_device(device)
+ if device_test is not None:
+ device_test(device)
+
+ finally:
+ for _, device, __ in devtuples:
+ self.test_remove_pairing(device.address)
+
+ @test_wrapper('Reconnect Classic HID', devices={'MOUSE': 1})
+ def sr_reconnect_classic_hid(self):
+ """ Reconnects a classic HID device after suspend/resume. """
+ device_type = 'MOUSE'
+ device = self.devices[device_type][0]
+ self.run_reconnect_device([(device_type, device,
+ self.test_mouse_left_click)])
+
+ @test_wrapper('Reconnect LE HID', devices={'BLE_MOUSE': 1})
+ def sr_reconnect_le_hid(self):
+ """ Reconnects a LE HID device after suspend/resume. """
+ device_type = 'BLE_MOUSE'
+ device = self.devices[device_type][0]
+ self.run_reconnect_device([(device_type, device,
+ self.test_mouse_left_click)])
+
+ @test_wrapper('Reconnect A2DP', devices={})
+ def sr_reconnect_a2dp(self):
+ """ Reconnects an A2DP device after suspend/resume. """
+ raise NotImplementedError()
+
+ @test_wrapper('Reconnect Multiple Classic HID',
+ devices={'MOUSE': 1, 'KEYBOARD': 1})
+ def sr_reconnect_multiple_classic_hid(self):
+ """ Reconnects multiple classic HID devices after suspend/resume. """
+ devices = [
+ ('MOUSE', self.devices['MOUSE'][0], self.test_mouse_left_click),
+ ('KEYBOARD', self.devices['KEYBOARD'][0],
+ self._test_keyboard_with_string)
+ ]
+ self.run_reconnect_device(devices)
+
+ @test_wrapper('Reconnect Multiple LE HID',
+ devices={'BLE_MOUSE': 1, 'BLE_KEYBOARD': 1})
+ def sr_reconnect_multiple_le_hid(self):
+ """ Reconnects multiple LE HID devices after suspend/resume. """
+ devices = [
+ ('BLE_MOUSE', self.devices['BLE_MOUSE'][0],
+ self.test_mouse_left_click),
+ ('BLE_KEYBOARD', self.devices['BLE_KEYBOARD'][0],
+ self._test_keyboard_with_string)
+ ]
+ self.run_reconnect_device(devices)
+
+ @test_wrapper(
+ 'Reconnect one of each classic+LE HID',
+ devices={
+ 'BLE_MOUSE': 1,
+ 'KEYBOARD': 1
+ })
+ def sr_reconnect_multiple_classic_le_hid(self):
+ """ Reconnects one of each classic and LE HID devices after
+ suspend/resume.
+ """
+ devices = [
+ ('BLE_MOUSE', self.devices['BLE_MOUSE'][0],
+ self.test_mouse_left_click),
+ ('KEYBOARD', self.devices['KEYBOARD'][0],
+ self._test_keyboard_with_string)
+ ]
+ self.run_reconnect_device(devices)
+
+ # ---------------------------------------------------------------
+ # Wake from suspend tests
+ # ---------------------------------------------------------------
+
+ def run_peer_wakeup_device(self, device_type, device, device_test=None):
+ """ Uses paired peer device to wake the device from suspend.
+
+ @param device_type: the device type (used to determine if it's LE)
+ @param device: the meta device with the paired device
+ @param device_test: test function to run after waking and connecting
+ the adapter/host
+ """
+ boot_id = self.host.get_boot_id()
+ suspend = self._suspend_async(
+ suspend_time=LONG_SUSPEND, allow_early_resume=True)
+
+ try:
+ self.test_discover_and_pair(device)
+ self.test_device_set_discoverable(device, False)
+
+ adapter_address = self.bluetooth_facade.address
+
+ # TODO(abhishekpandit) - Wait for powerd event that marks the
+ # power/wakeup of hci0's parent to "enabled"; otherwise, we won't be
+ # able to wake
+ time.sleep(3)
+
+ # Trigger suspend, asynchronously trigger wake and wait for resume
+ self.suspend_and_wait_for_sleep(suspend)
+
+ # Trigger peer wakeup
+ peer_wake = self._device_connect_async(device_type, device,
+ adapter_address)
+ peer_wake.start()
+
+ # Expect a quick resume
+ self.wait_for_resume(boot_id, suspend, resume_timeout=SHORT_SUSPEND)
+
+ # Finish peer wake process
+ peer_wake.join()
+
+ if device_test is not None:
+ device_test(device)
+
+ finally:
+ self.test_remove_pairing(device.address)
+
+ @test_wrapper('Peer wakeup Classic HID', devices={'MOUSE': 1})
+ def sr_peer_wake_classic_hid(self):
+ """ Use classic HID device to wake from suspend. """
+ device = self.devices['MOUSE'][0]
+ self.run_peer_wakeup_device(
+ 'MOUSE', device, device_test=self.test_mouse_left_click)
+
+ @test_wrapper('Peer wakeup LE HID', devices={'BLE_MOUSE': 1})
+ def sr_peer_wake_le_hid(self):
+ """ Use LE HID device to wake from suspend. """
+ device = self.devices['BLE_MOUSE'][0]
+ self.run_peer_wakeup_device(
+ 'BLE_MOUSE', device, device_test=self.test_mouse_left_click)
+
+ @test_wrapper('Peer wakeup with A2DP should fail')
+ def sr_peer_wake_a2dp_should_fail(self):
+ """ Use A2DP device to wake from suspend and fail. """
+ raise NotImplementedError()
+
+ # ---------------------------------------------------------------
+ # Suspend while discovering and advertising
+ # ---------------------------------------------------------------
+
+ @test_wrapper('Suspend while discovering', devices={'BLE_MOUSE': 1})
+ def sr_while_discovering(self):
+ """ Suspend while discovering. """
+ device = self.devices['BLE_MOUSE'][0]
+ boot_id = self.host.get_boot_id()
+ suspend = self._suspend_async(
+ suspend_time=SHORT_SUSPEND, allow_early_resume=False)
+
+ # We don't pair with the peer device because we don't want it in the
+ # whitelist. However, we want an advertising peer in this test to
+ # respond to the discovery requests.
+ self.test_device_set_discoverable(device, True)
+
+ self.test_start_discovery()
+ self.suspend_and_wait_for_sleep(suspend)
+
+ # If discovery events wake us early, an exception will be raised and
+ # suspend.exitcode will be non-zero
+ self.wait_for_resume(boot_id, suspend, resume_timeout=SHORT_SUSPEND)
+
+ # Discovery should be restored after suspend
+ self.test_is_discovering()
+
+ self.test_stop_discovery()
+
+ @test_wrapper('Suspend while advertising', devices={'MOUSE': 1})
+ def sr_while_advertising(self):
+ """ Suspend while advertising. """
+ device = self.devices['MOUSE'][0]
+ boot_id = self.host.get_boot_id()
+ suspend = self._suspend_async(
+ suspend_time=MED_SUSPEND, allow_early_resume=False)
+
+ self.test_discoverable()
+ self.suspend_and_wait_for_sleep(suspend)
+
+ # Peer device should not be able to discover us in suspend
+ self.test_discover_by_device_fails(device)
+
+ self.wait_for_resume(boot_id, suspend, resume_timeout=MED_SUSPEND)
+
+ # Test that we are properly discoverable again
+ self.test_is_discoverable()
+ self.test_discover_by_device(device)
+
+ self.test_nondiscoverable()
+
+ @batch_wrapper('SR with Peer Sanity')
+ def sr_sanity_batch_run(self, num_iterations=1, test_name=None):
+ """ Batch of suspend/resume peer sanity tests. """
+ self.sr_reconnect_classic_hid()
+ self.sr_reconnect_le_hid()
+ self.sr_peer_wake_classic_hid()
+ self.sr_peer_wake_le_hid()
+ self.sr_while_discovering()
+ self.sr_while_advertising()
+
+ def run_once(self,
+ host,
+ num_iterations=1,
+ test_name=None,
+ flag='Quick Sanity'):
+ """Running Bluetooth adapter suspend resume with peer autotest.
+
+ @param host: the DUT, usually a chromebook
+ @param num_iterations: the number of times to execute the test
+ @param test_name: the test to run or None for all tests
+ @param flag: run tests with this flag (default: Quick Sanity)
+
+ """
+
+ # Initialize and run the test batch or the requested specific test
+ self.quick_test_init(host, use_chameleon=True, flag=flag)
+ self.sr_sanity_batch_run(num_iterations, test_name)
+ self.quick_test_cleanup()
diff --git a/server/site_tests/bluetooth_AdapterSRSanity/control b/server/site_tests/bluetooth_AdapterSRSanity/control
new file mode 100644
index 0000000..2bfd547
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterSRSanity/control
@@ -0,0 +1,37 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+from autotest_lib.server.cros.bluetooth import bluetooth_test
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterSRSanity'
+PURPOSE = ('Batch of Bluetooth SR sanity tests')
+CRITERIA = 'Pass all sanity tests'
+ATTRIBUTES = 'suite:bluetooth_flaky'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, chameleon:bt_hid, chameleon:bt_ble_hid'
+
+DOC = """
+ A batch of Bluetooth suspend/resume with peer sanity tests. This test is
+ written as a batch of tests in order to reduce test time, since autotest
+ ramp-up time is costly. The batch uses BluetoothAdapterQuickTests wrapper
+ methods to start and end a test and a batch of tests.
+
+ This class can be called to run the entire test batch or to run a
+ specific test only.
+ """
+
+args_dict = utils.args_to_dict(args)
+chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
+
+def run(machine):
+ host = hosts.create_host(machine, chameleon_args=chameleon_args)
+ job.run_test('bluetooth_AdapterSRSanity', host=host,
+ num_iterations=1)
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterSRSanity/control.sr_peer_wake_classic_hid b/server/site_tests/bluetooth_AdapterSRSanity/control.sr_peer_wake_classic_hid
new file mode 100644
index 0000000..5c7f41f
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterSRSanity/control.sr_peer_wake_classic_hid
@@ -0,0 +1,29 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+from autotest_lib.server.cros.bluetooth import bluetooth_test
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterSRSanity.sr_peer_wake_classic_hid'
+PURPOSE = ('Single run of a test')
+CRITERIA = 'Pass test'
+ATTRIBUTES = 'suite:bluetooth_flaky'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, chameleon:bt_hid, chameleon:bt_ble_hid'
+
+DOC = """ Single run of a Suspend-Resume sanity testcase. """
+
+args_dict = utils.args_to_dict(args)
+chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
+
+def run(machine):
+ host = hosts.create_host(machine, chameleon_args=chameleon_args)
+ job.run_test('bluetooth_AdapterSRSanity', host=host, num_iterations=1,
+ test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterSRSanity/control.sr_peer_wake_le_hid b/server/site_tests/bluetooth_AdapterSRSanity/control.sr_peer_wake_le_hid
new file mode 100644
index 0000000..b520a27
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterSRSanity/control.sr_peer_wake_le_hid
@@ -0,0 +1,29 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+from autotest_lib.server.cros.bluetooth import bluetooth_test
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterSRSanity.sr_peer_wake_le_hid'
+PURPOSE = ('Single run of a test')
+CRITERIA = 'Pass test'
+ATTRIBUTES = 'suite:bluetooth_flaky'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, chameleon:bt_hid, chameleon:bt_ble_hid'
+
+DOC = """ Single run of a Suspend-Resume sanity testcase. """
+
+args_dict = utils.args_to_dict(args)
+chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
+
+def run(machine):
+ host = hosts.create_host(machine, chameleon_args=chameleon_args)
+ job.run_test('bluetooth_AdapterSRSanity', host=host, num_iterations=1,
+ test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterSRSanity/control.sr_reconnect_classic_hid b/server/site_tests/bluetooth_AdapterSRSanity/control.sr_reconnect_classic_hid
new file mode 100644
index 0000000..9047a27
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterSRSanity/control.sr_reconnect_classic_hid
@@ -0,0 +1,29 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+from autotest_lib.server.cros.bluetooth import bluetooth_test
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterSRSanity.sr_reconnect_classic_hid'
+PURPOSE = ('Single run of a test')
+CRITERIA = 'Pass test'
+ATTRIBUTES = 'suite:bluetooth_flaky'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, chameleon:bt_hid, chameleon:bt_ble_hid'
+
+DOC = """ Single run of a Suspend-Resume sanity testcase. """
+
+args_dict = utils.args_to_dict(args)
+chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
+
+def run(machine):
+ host = hosts.create_host(machine, chameleon_args=chameleon_args)
+ job.run_test('bluetooth_AdapterSRSanity', host=host, num_iterations=1,
+ test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterSRSanity/control.sr_reconnect_le_hid b/server/site_tests/bluetooth_AdapterSRSanity/control.sr_reconnect_le_hid
new file mode 100644
index 0000000..8203a28
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterSRSanity/control.sr_reconnect_le_hid
@@ -0,0 +1,29 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+from autotest_lib.server.cros.bluetooth import bluetooth_test
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterSRSanity.sr_reconnect_le_hid'
+PURPOSE = ('Single run of a test')
+CRITERIA = 'Pass test'
+ATTRIBUTES = 'suite:bluetooth_flaky'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, chameleon:bt_hid, chameleon:bt_ble_hid'
+
+DOC = """ Single run of a Suspend-Resume sanity testcase. """
+
+args_dict = utils.args_to_dict(args)
+chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
+
+def run(machine):
+ host = hosts.create_host(machine, chameleon_args=chameleon_args)
+ job.run_test('bluetooth_AdapterSRSanity', host=host, num_iterations=1,
+ test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterSRSanity/control.sr_reconnect_multiple_classic_hid b/server/site_tests/bluetooth_AdapterSRSanity/control.sr_reconnect_multiple_classic_hid
new file mode 100644
index 0000000..338043f
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterSRSanity/control.sr_reconnect_multiple_classic_hid
@@ -0,0 +1,29 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+from autotest_lib.server.cros.bluetooth import bluetooth_test
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterSRSanity.sr_reconnect_multiple_classic_hid'
+PURPOSE = ('Single run of a test')
+CRITERIA = 'Pass test'
+ATTRIBUTES = 'suite:bluetooth_flaky'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, chameleon:bt_hid, chameleon:bt_ble_hid'
+
+DOC = """ Single run of a Suspend-Resume sanity testcase. """
+
+args_dict = utils.args_to_dict(args)
+chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
+
+def run(machine):
+ host = hosts.create_host(machine, chameleon_args=chameleon_args)
+ job.run_test('bluetooth_AdapterSRSanity', host=host, num_iterations=1,
+ test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterSRSanity/control.sr_reconnect_multiple_classic_le_hid b/server/site_tests/bluetooth_AdapterSRSanity/control.sr_reconnect_multiple_classic_le_hid
new file mode 100644
index 0000000..f5e456d
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterSRSanity/control.sr_reconnect_multiple_classic_le_hid
@@ -0,0 +1,29 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+from autotest_lib.server.cros.bluetooth import bluetooth_test
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterSRSanity.sr_reconnect_multiple_classic_le_hid'
+PURPOSE = ('Single run of a test')
+CRITERIA = 'Pass test'
+ATTRIBUTES = 'suite:bluetooth_flaky'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, chameleon:bt_hid, chameleon:bt_ble_hid'
+
+DOC = """ Single run of a Suspend-Resume sanity testcase. """
+
+args_dict = utils.args_to_dict(args)
+chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
+
+def run(machine):
+ host = hosts.create_host(machine, chameleon_args=chameleon_args)
+ job.run_test('bluetooth_AdapterSRSanity', host=host, num_iterations=1,
+ test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterSRSanity/control.sr_reconnect_multiple_le_hid b/server/site_tests/bluetooth_AdapterSRSanity/control.sr_reconnect_multiple_le_hid
new file mode 100644
index 0000000..8ebcbc3
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterSRSanity/control.sr_reconnect_multiple_le_hid
@@ -0,0 +1,29 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+from autotest_lib.server.cros.bluetooth import bluetooth_test
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterSRSanity.sr_reconnect_multiple_le_hid'
+PURPOSE = ('Single run of a test')
+CRITERIA = 'Pass test'
+ATTRIBUTES = 'suite:bluetooth_flaky'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, chameleon:bt_hid, chameleon:bt_ble_hid'
+
+DOC = """ Single run of a Suspend-Resume sanity testcase. """
+
+args_dict = utils.args_to_dict(args)
+chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
+
+def run(machine):
+ host = hosts.create_host(machine, chameleon_args=chameleon_args)
+ job.run_test('bluetooth_AdapterSRSanity', host=host, num_iterations=1,
+ test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterSRSanity/control.sr_while_advertising b/server/site_tests/bluetooth_AdapterSRSanity/control.sr_while_advertising
new file mode 100644
index 0000000..2d08de3
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterSRSanity/control.sr_while_advertising
@@ -0,0 +1,29 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+from autotest_lib.server.cros.bluetooth import bluetooth_test
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterSRSanity.sr_while_advertising'
+PURPOSE = ('Single run of a test')
+CRITERIA = 'Pass test'
+ATTRIBUTES = 'suite:bluetooth_flaky'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, chameleon:bt_hid, chameleon:bt_ble_hid'
+
+DOC = """ Single run of a Suspend-Resume sanity testcase. """
+
+args_dict = utils.args_to_dict(args)
+chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
+
+def run(machine):
+ host = hosts.create_host(machine, chameleon_args=chameleon_args)
+ job.run_test('bluetooth_AdapterSRSanity', host=host, num_iterations=1,
+ test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/bluetooth_AdapterSRSanity/control.sr_while_discovering b/server/site_tests/bluetooth_AdapterSRSanity/control.sr_while_discovering
new file mode 100644
index 0000000..eb3c420
--- /dev/null
+++ b/server/site_tests/bluetooth_AdapterSRSanity/control.sr_while_discovering
@@ -0,0 +1,29 @@
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from autotest_lib.server import utils
+from autotest_lib.server.cros.bluetooth import bluetooth_test
+
+AUTHOR = 'chromeos-bluetooth'
+NAME = 'bluetooth_AdapterSRSanity.sr_while_discovering'
+PURPOSE = ('Single run of a test')
+CRITERIA = 'Pass test'
+ATTRIBUTES = 'suite:bluetooth_flaky'
+TIME = 'SHORT'
+TEST_CATEGORY = 'Functional'
+TEST_CLASS = 'bluetooth'
+TEST_TYPE = 'server'
+DEPENDENCIES = 'bluetooth, chameleon:bt_hid, chameleon:bt_ble_hid'
+
+DOC = """ Single run of a Suspend-Resume sanity testcase. """
+
+args_dict = utils.args_to_dict(args)
+chameleon_args = hosts.CrosHost.get_chameleon_arguments(args_dict)
+
+def run(machine):
+ host = hosts.create_host(machine, chameleon_args=chameleon_args)
+ job.run_test('bluetooth_AdapterSRSanity', host=host, num_iterations=1,
+ test_name=NAME.split('.')[1])
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/cheets_CTS_Instant/cheets_CTS_Instant.py b/server/site_tests/cheets_CTS_Instant/cheets_CTS_Instant.py
index 70406b2..c8b6298 100644
--- a/server/site_tests/cheets_CTS_Instant/cheets_CTS_Instant.py
+++ b/server/site_tests/cheets_CTS_Instant/cheets_CTS_Instant.py
@@ -43,11 +43,17 @@
cmd = []
for arg in template:
cmd.append(arg.format(session_id=session_id))
+ # See b/149681932. Pass an empty url to force using the local config
+ # instead of a network access (which returns an empty config anyway).
+ cmd.append('--dynamic-config-url=')
return cmd
def _tradefed_run_command(self, template):
"""Build tradefed 'run' command from template."""
cmd = template[:]
+ # See b/149681932. Pass an empty url to force using the local config
+ # instead of a network access (which returns an empty config anyway).
+ cmd.append('--dynamic-config-url=')
# If we are running outside of the lab we can collect more data.
if not utils.is_in_container():
logging.info('Running outside of lab, adding extra debug options.')
diff --git a/server/site_tests/cheets_CTS_P/cheets_CTS_P.py b/server/site_tests/cheets_CTS_P/cheets_CTS_P.py
index f830850..7e2c357 100644
--- a/server/site_tests/cheets_CTS_P/cheets_CTS_P.py
+++ b/server/site_tests/cheets_CTS_P/cheets_CTS_P.py
@@ -46,6 +46,7 @@
def _tradefed_retry_command(self, template, session_id):
"""Build tradefed 'retry' command from template."""
cmd = []
+ cmd += self.extra_command_flags
for arg in template:
cmd.append(arg.format(session_id=session_id))
return cmd
@@ -53,6 +54,7 @@
def _tradefed_run_command(self, template):
"""Build tradefed 'run' command from template."""
cmd = template[:]
+ cmd += self.extra_command_flags
# If we are running outside of the lab we can collect more data.
if not utils.is_in_container():
logging.info('Running outside of lab, adding extra debug options.')
@@ -134,6 +136,7 @@
retry_manual_tests=retry_manual_tests,
warn_on_test_retry=warn_on_test_retry,
hard_reboot_on_failure=hard_reboot_on_failure)
+ self.extra_command_flags = []
if camera_facing:
self.initialize_camerabox(camera_facing, cmdline_args)
@@ -181,6 +184,10 @@
@param prerequisites: a list of prerequisites that identify rogue DUTs.
@param timeout: time after which tradefed can be interrupted.
"""
+ # See b/149889853. Non-media tests basically do not require dynamic
+ # config. To reduce flakiness, suppress the config.
+ if not needs_push_media:
+ self.extra_command_flags.append('--dynamic-config-url=')
self._run_tradefed_with_retries(
test_name=test_name,
run_template=run_template,
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsPermissionTestCases b/server/site_tests/cheets_GTS/control.7.0_r3.GtsPermissionTestCases
deleted file mode 100644
index 170a7a9..0000000
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsPermissionTestCases
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsPermissionTestCases'
-ATTRIBUTES = 'suite:arc-gts'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'MEDIUM'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsPermissionTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
- host_list = [hosts.create_host(machine)]
- job.run_test(
- 'cheets_GTS',
- hosts=host_list,
- iterations=1,
- tag='7.0_r3.GtsPermissionTestCases',
- test_name='cheets_GTS.7.0_r3.GtsPermissionTestCases',
- authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
- run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPermissionTestCases', '--ignore-business-logic-failure'],
- retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
- target_module='GtsPermissionTestCases',
- target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
- timeout=720)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.all.GtsCameraTestCases_-_GtsYouTubeTestCases b/server/site_tests/cheets_GTS/control.7.0_r3.all.GtsCameraTestCases_-_GtsYouTubeTestCases
deleted file mode 100644
index 489e03f..0000000
--- a/server/site_tests/cheets_GTS/control.7.0_r3.all.GtsCameraTestCases_-_GtsYouTubeTestCases
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2016 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file has been automatically generated. Do not edit!
-
-AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.all.GtsCameraTestCases_-_GtsYouTubeTestCases'
-ATTRIBUTES = 'suite:arc-gts-qual'
-DEPENDENCIES = 'arc'
-JOB_RETRIES = 1
-TEST_TYPE = 'server'
-TIME = 'LONG'
-MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsCameraTestCases, GtsCastHostTestCases, GtsContactsAppDeviceTestCases, GtsContactsTest, GtsContentTestCases, GtsDeviceConfigTestCases, GtsDexModuleRegistrationTestCases, GtsDialerDeviceTestCases, GtsDozeDeviceTestCases, GtsEdiHostTestCases, GtsExoPlayerTestCases, GtsFeaturesTestCases, GtsGameDeviceHostTestCases, GtsGmscoreHostTestCases, GtsGraphicsHostTestCases, GtsHomeHostTestCases, GtsIncidentConfirmationTestCases, GtsIncidentManagerTestCases, GtsInstallPackagesWhitelistDeviceTestCases, GtsInstantAppsHostTestCases, GtsLargeApkHostTestCases, GtsLauncherHostTestCases, GtsLocationHostTestCases, GtsLocationTestCases, GtsMediaTestCases, GtsMemoryHostTestCases, GtsMemoryTestCases, GtsModuleMetadataTestCases, GtsNetStatsHostTestCases, GtsNetTestCases, GtsNetworkStackHostTestCases, GtsNetworkWatchlistTestCases, GtsNmgiarcTestCases, GtsNoPermissionTestCases, GtsNoPermissionTestCases25, GtsNotificationTestCases, GtsOemLockServiceTestCases, GtsOsTestCases, GtsPackageInstallTestCases, GtsPackageInstallerTapjackingTestCases, GtsPackageManagerHostTestCases, GtsPackageNameCertPairsDeviceTestCases, GtsPackageUninstallTestCases, GtsPartnerBookmarksTestCases, GtsPermissionTestCases, GtsPlacementTestCases, GtsPlayAutoInstallTestCases, GtsPlayStoreHostTestCases, GtsPrintTestCases, GtsPrivacyTestCases, GtsPrivilegedUpdatePreparer, GtsPropertiesTestCases, GtsRegulationComplianceTestCases, GtsRlzTestCases, GtsSampleDeviceTestCases, GtsSampleDynamicConfigTestCases, GtsSampleHostTestCases, GtsScreenshotHostTestCases, GtsSearchHostTestCases, GtsSecurityHostTestCases, GtsSecurityTestCases, GtsSensorHostTestCases, GtsSettingsHostTestCases, GtsSettingsTestCases, GtsSetupWizardHostTestCases, GtsSetupWizardNoPermissionTestCases, GtsSimAppDialogTestCases, GtsSmartBatteryDeviceTestCases, GtsSsaidHostTestCases, GtsStagedInstallHostTestCases, GtsStatsdHostTestCases, GtsStorageTestCases, GtsSupervisionTestCases, GtsSuspendAppsPermissionTestCases, GtsSuspendAppsTestCases, GtsTelecomManagerTests, GtsTelephonyTestCases, GtsTestHarnessModeTestCases, GtsTetheringTestCases, GtsTvBugReportTestCases, GtsTvHostTestCases, GtsTvTestCases, GtsUnofficialApisUsageTestCases, GtsUsageStatsTestCases, GtsViewTestCases, GtsWebViewHostTestCases, GtsWebViewTestCases, GtsWellbeingHostTestCases, GtsWellbeingPermissionPolicyTestCases, GtsWellbeingTestCases, GtsYouTubeTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
-
-def run_TS(machine):
- host_list = [hosts.create_host(machine)]
- job.run_test(
- 'cheets_GTS',
- hosts=host_list,
- iterations=1,
- max_retry=9,
- needs_push_media=True,
- tag='7.0_r3.all.GtsCameraTestCases_-_GtsYouTubeTestCases',
- test_name='cheets_GTS.7.0_r3.all.GtsCameraTestCases_-_GtsYouTubeTestCases',
- authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
- run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsCameraTestCases', '--include-filter', 'GtsCastHostTestCases', '--include-filter', 'GtsContactsAppDeviceTestCases', '--include-filter', 'GtsContactsTest', '--include-filter', 'GtsContentTestCases', '--include-filter', 'GtsDeviceConfigTestCases', '--include-filter', 'GtsDexModuleRegistrationTestCases', '--include-filter', 'GtsDialerDeviceTestCases', '--include-filter', 'GtsDozeDeviceTestCases', '--include-filter', 'GtsEdiHostTestCases', '--include-filter', 'GtsExoPlayerTestCases', '--include-filter', 'GtsFeaturesTestCases', '--include-filter', 'GtsGameDeviceHostTestCases', '--include-filter', 'GtsGmscoreHostTestCases', '--include-filter', 'GtsGraphicsHostTestCases', '--include-filter', 'GtsHomeHostTestCases', '--include-filter', 'GtsIncidentConfirmationTestCases', '--include-filter', 'GtsIncidentManagerTestCases', '--include-filter', 'GtsInstallPackagesWhitelistDeviceTestCases', '--include-filter', 'GtsInstantAppsHostTestCases', '--include-filter', 'GtsLargeApkHostTestCases', '--include-filter', 'GtsLauncherHostTestCases', '--include-filter', 'GtsLocationHostTestCases', '--include-filter', 'GtsLocationTestCases', '--include-filter', 'GtsMediaTestCases', '--include-filter', 'GtsMemoryHostTestCases', '--include-filter', 'GtsMemoryTestCases', '--include-filter', 'GtsModuleMetadataTestCases', '--include-filter', 'GtsNetStatsHostTestCases', '--include-filter', 'GtsNetTestCases', '--include-filter', 'GtsNetworkStackHostTestCases', '--include-filter', 'GtsNetworkWatchlistTestCases', '--include-filter', 'GtsNmgiarcTestCases', '--include-filter', 'GtsNoPermissionTestCases', '--include-filter', 'GtsNoPermissionTestCases25', '--include-filter', 'GtsNotificationTestCases', '--include-filter', 'GtsOemLockServiceTestCases', '--include-filter', 'GtsOsTestCases', '--include-filter', 'GtsPackageInstallTestCases', '--include-filter', 'GtsPackageInstallerTapjackingTestCases', '--include-filter', 'GtsPackageManagerHostTestCases', '--include-filter', 'GtsPackageNameCertPairsDeviceTestCases', '--include-filter', 'GtsPackageUninstallTestCases', '--include-filter', 'GtsPartnerBookmarksTestCases', '--include-filter', 'GtsPermissionTestCases', '--include-filter', 'GtsPlacementTestCases', '--include-filter', 'GtsPlayAutoInstallTestCases', '--include-filter', 'GtsPlayStoreHostTestCases', '--include-filter', 'GtsPrintTestCases', '--include-filter', 'GtsPrivacyTestCases', '--include-filter', 'GtsPrivilegedUpdatePreparer', '--include-filter', 'GtsPropertiesTestCases', '--include-filter', 'GtsRegulationComplianceTestCases', '--include-filter', 'GtsRlzTestCases', '--include-filter', 'GtsSampleDeviceTestCases', '--include-filter', 'GtsSampleDynamicConfigTestCases', '--include-filter', 'GtsSampleHostTestCases', '--include-filter', 'GtsScreenshotHostTestCases', '--include-filter', 'GtsSearchHostTestCases', '--include-filter', 'GtsSecurityHostTestCases', '--include-filter', 'GtsSecurityTestCases', '--include-filter', 'GtsSensorHostTestCases', '--include-filter', 'GtsSettingsHostTestCases', '--include-filter', 'GtsSettingsTestCases', '--include-filter', 'GtsSetupWizardHostTestCases', '--include-filter', 'GtsSetupWizardNoPermissionTestCases', '--include-filter', 'GtsSimAppDialogTestCases', '--include-filter', 'GtsSmartBatteryDeviceTestCases', '--include-filter', 'GtsSsaidHostTestCases', '--include-filter', 'GtsStagedInstallHostTestCases', '--include-filter', 'GtsStatsdHostTestCases', '--include-filter', 'GtsStorageTestCases', '--include-filter', 
'GtsSupervisionTestCases', '--include-filter', 'GtsSuspendAppsPermissionTestCases', '--include-filter', 'GtsSuspendAppsTestCases', '--include-filter', 'GtsTelecomManagerTests', '--include-filter', 'GtsTelephonyTestCases', '--include-filter', 'GtsTestHarnessModeTestCases', '--include-filter', 'GtsTetheringTestCases', '--include-filter', 'GtsTvBugReportTestCases', '--include-filter', 'GtsTvHostTestCases', '--include-filter', 'GtsTvTestCases', '--include-filter', 'GtsUnofficialApisUsageTestCases', '--include-filter', 'GtsUsageStatsTestCases', '--include-filter', 'GtsViewTestCases', '--include-filter', 'GtsWebViewHostTestCases', '--include-filter', 'GtsWebViewTestCases', '--include-filter', 'GtsWellbeingHostTestCases', '--include-filter', 'GtsWellbeingPermissionPolicyTestCases', '--include-filter', 'GtsWellbeingTestCases', '--include-filter', 'GtsYouTubeTestCases', '--ignore-business-logic-failure'],
- retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
- target_module='all.GtsCameraTestCases_-_GtsYouTubeTestCases',
- target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
- prerequisites=['bluetooth'],
- timeout=86400)
-
-parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.CtsCheckpointTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.CtsCheckpointTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.CtsCheckpointTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.CtsCheckpointTestCases
index 567ec28..71d7a59 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.CtsCheckpointTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.CtsCheckpointTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.CtsCheckpointTestCases'
+NAME = 'cheets_GTS.7.0_r4.CtsCheckpointTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.CtsCheckpointTestCases',
- test_name='cheets_GTS.7.0_r3.CtsCheckpointTestCases',
+ tag='7.0_r4.CtsCheckpointTestCases',
+ test_name='cheets_GTS.7.0_r4.CtsCheckpointTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'CtsCheckpointTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='CtsCheckpointTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsAccountsHostTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsAccountsHostTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsAccountsHostTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsAccountsHostTestCases
index 86ed84e..d2c1e1c 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsAccountsHostTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsAccountsHostTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsAccountsHostTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsAccountsHostTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsAccountsHostTestCases',
- test_name='cheets_GTS.7.0_r3.GtsAccountsHostTestCases',
+ tag='7.0_r4.GtsAccountsHostTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsAccountsHostTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAccountsHostTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsAccountsHostTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsAdminTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsAdminTestCases
similarity index 87%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsAdminTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsAdminTestCases
index a1c4425..7eedf19 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsAdminTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsAdminTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsAdminTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsAdminTestCases'
ATTRIBUTES = 'suite:arc-gts, suite:bvt-perbuild'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -21,14 +21,14 @@
hosts=host_list,
iterations=1,
max_retry=3,
- tag='7.0_r3.GtsAdminTestCases',
- test_name='cheets_GTS.7.0_r3.GtsAdminTestCases',
+ tag='7.0_r4.GtsAdminTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsAdminTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAdminTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsAdminTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsAfwTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsAfwTestCases
similarity index 87%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsAfwTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsAfwTestCases
index f7d87e7..1a608ed 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsAfwTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsAfwTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsAfwTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsAfwTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsAfwTestCases',
- test_name='cheets_GTS.7.0_r3.GtsAfwTestCases',
+ tag='7.0_r4.GtsAfwTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsAfwTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAfwTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsAfwTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsAndroidAutoDeviceTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsAndroidAutoDeviceTestCases
similarity index 85%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsAndroidAutoDeviceTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsAndroidAutoDeviceTestCases
index e3b5d58..cc44bd5 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsAndroidAutoDeviceTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsAndroidAutoDeviceTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsAndroidAutoDeviceTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsAndroidAutoDeviceTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsAndroidAutoDeviceTestCases',
- test_name='cheets_GTS.7.0_r3.GtsAndroidAutoDeviceTestCases',
+ tag='7.0_r4.GtsAndroidAutoDeviceTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsAndroidAutoDeviceTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAndroidAutoDeviceTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsAndroidAutoDeviceTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsApp b/server/site_tests/cheets_GTS/control.7.0_r4.GtsApp
similarity index 90%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsApp
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsApp
index f17dae1..9a498bd 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsApp
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsApp
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsApp'
+NAME = 'cheets_GTS.7.0_r4.GtsApp'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsApp',
- test_name='cheets_GTS.7.0_r3.GtsApp',
+ tag='7.0_r4.GtsApp',
+ test_name='cheets_GTS.7.0_r4.GtsApp',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsAppBlacklistDeviceTestCases', '--include-filter', 'GtsAppTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsApp',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=1080)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsArtManagerHostTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsArtManagerHostTestCases
similarity index 85%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsArtManagerHostTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsArtManagerHostTestCases
index 5f4769f..4660cb7 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsArtManagerHostTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsArtManagerHostTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsArtManagerHostTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsArtManagerHostTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsArtManagerHostTestCases',
- test_name='cheets_GTS.7.0_r3.GtsArtManagerHostTestCases',
+ tag='7.0_r4.GtsArtManagerHostTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsArtManagerHostTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsArtManagerHostTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsArtManagerHostTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsAssistIntentTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsAssistIntentTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsAssistIntentTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsAssistIntentTestCases
index 7ac4e96..56495ee 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsAssistIntentTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsAssistIntentTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsAssistIntentTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsAssistIntentTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsAssistIntentTestCases',
- test_name='cheets_GTS.7.0_r3.GtsAssistIntentTestCases',
+ tag='7.0_r4.GtsAssistIntentTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsAssistIntentTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAssistIntentTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsAssistIntentTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsAssistantHostTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsAssistantHostTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsAssistantHostTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsAssistantHostTestCases
index 91ce467..f277d40 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsAssistantHostTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsAssistantHostTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsAssistantHostTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsAssistantHostTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsAssistantHostTestCases',
- test_name='cheets_GTS.7.0_r3.GtsAssistantHostTestCases',
+ tag='7.0_r4.GtsAssistantHostTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsAssistantHostTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAssistantHostTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsAssistantHostTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsBackup b/server/site_tests/cheets_GTS/control.7.0_r4.GtsBackup
similarity index 89%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsBackup
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsBackup
index c5ec90f..a8e00f3 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsBackup
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsBackup
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsBackup'
+NAME = 'cheets_GTS.7.0_r4.GtsBackup'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsBackup',
- test_name='cheets_GTS.7.0_r3.GtsBackup',
+ tag='7.0_r4.GtsBackup',
+ test_name='cheets_GTS.7.0_r4.GtsBackup',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsBackupHostTestCases', '--include-filter', 'GtsBackupTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsBackup',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=1080)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsBootStatsTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsBootStatsTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsBootStatsTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsBootStatsTestCases
index ee89f1c..aa0ff27 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsBootStatsTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsBootStatsTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsBootStatsTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsBootStatsTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsBootStatsTestCases',
- test_name='cheets_GTS.7.0_r3.GtsBootStatsTestCases',
+ tag='7.0_r4.GtsBootStatsTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsBootStatsTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsBootStatsTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsBootStatsTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsCallLogTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsCallLogTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsCallLogTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsCallLogTestCases
index 2d7ce42..a55143c 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsCallLogTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsCallLogTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsCallLogTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsCallLogTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsCallLogTestCases',
- test_name='cheets_GTS.7.0_r3.GtsCallLogTestCases',
+ tag='7.0_r4.GtsCallLogTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsCallLogTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsCallLogTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsCallLogTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsCameraTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsCameraTestCases
similarity index 87%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsCameraTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsCameraTestCases
index 89c1ce4..d22edbf 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsCameraTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsCameraTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsCameraTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsCameraTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsCameraTestCases',
- test_name='cheets_GTS.7.0_r3.GtsCameraTestCases',
+ tag='7.0_r4.GtsCameraTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsCameraTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsCameraTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsCameraTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsCastHostTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsCastHostTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsCastHostTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsCastHostTestCases
index 1d3f2d9..5f4325f 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsCastHostTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsCastHostTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsCastHostTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsCastHostTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsCastHostTestCases',
- test_name='cheets_GTS.7.0_r3.GtsCastHostTestCases',
+ tag='7.0_r4.GtsCastHostTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsCastHostTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsCastHostTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsCastHostTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsContacts b/server/site_tests/cheets_GTS/control.7.0_r4.GtsContacts
similarity index 89%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsContacts
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsContacts
index 33daebc..0c99201 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsContacts
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsContacts
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsContacts'
+NAME = 'cheets_GTS.7.0_r4.GtsContacts'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -21,14 +21,14 @@
hosts=host_list,
iterations=1,
max_retry=3,
- tag='7.0_r3.GtsContacts',
- test_name='cheets_GTS.7.0_r3.GtsContacts',
+ tag='7.0_r4.GtsContacts',
+ test_name='cheets_GTS.7.0_r4.GtsContacts',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsContactsAppDeviceTestCases', '--include-filter', 'GtsContactsTest', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsContacts',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=360)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsContentTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsContentTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsContentTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsContentTestCases
index f0d3503..32f685e 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsContentTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsContentTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsContentTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsContentTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsContentTestCases',
- test_name='cheets_GTS.7.0_r3.GtsContentTestCases',
+ tag='7.0_r4.GtsContentTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsContentTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsContentTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsContentTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsDeviceConfigTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsDeviceConfigTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsDeviceConfigTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsDeviceConfigTestCases
index 443d5c3..eb0f0ab 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsDeviceConfigTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsDeviceConfigTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsDeviceConfigTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsDeviceConfigTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsDeviceConfigTestCases',
- test_name='cheets_GTS.7.0_r3.GtsDeviceConfigTestCases',
+ tag='7.0_r4.GtsDeviceConfigTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsDeviceConfigTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsDeviceConfigTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsDeviceConfigTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsDexModuleRegistrationTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsDexModuleRegistrationTestCases
similarity index 85%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsDexModuleRegistrationTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsDexModuleRegistrationTestCases
index 36f0588..a3a40d0 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsDexModuleRegistrationTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsDexModuleRegistrationTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsDexModuleRegistrationTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsDexModuleRegistrationTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsDexModuleRegistrationTestCases',
- test_name='cheets_GTS.7.0_r3.GtsDexModuleRegistrationTestCases',
+ tag='7.0_r4.GtsDexModuleRegistrationTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsDexModuleRegistrationTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsDexModuleRegistrationTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsDexModuleRegistrationTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsDialerDeviceTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsDialerDeviceTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsDialerDeviceTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsDialerDeviceTestCases
index 2c40f0d..650aecc 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsDialerDeviceTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsDialerDeviceTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsDialerDeviceTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsDialerDeviceTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsDialerDeviceTestCases',
- test_name='cheets_GTS.7.0_r3.GtsDialerDeviceTestCases',
+ tag='7.0_r4.GtsDialerDeviceTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsDialerDeviceTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsDialerDeviceTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsDialerDeviceTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsDozeDeviceTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsDozeDeviceTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsDozeDeviceTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsDozeDeviceTestCases
index a4a5732..c1d831e 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsDozeDeviceTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsDozeDeviceTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsDozeDeviceTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsDozeDeviceTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsDozeDeviceTestCases',
- test_name='cheets_GTS.7.0_r3.GtsDozeDeviceTestCases',
+ tag='7.0_r4.GtsDozeDeviceTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsDozeDeviceTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsDozeDeviceTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsDozeDeviceTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsEdiHostTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsEdiHostTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsEdiHostTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsEdiHostTestCases
index c4e57db..1e371db 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsEdiHostTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsEdiHostTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsEdiHostTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsEdiHostTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsEdiHostTestCases',
- test_name='cheets_GTS.7.0_r3.GtsEdiHostTestCases',
+ tag='7.0_r4.GtsEdiHostTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsEdiHostTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsEdiHostTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsEdiHostTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsExoPlayerTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsExoPlayerTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsExoPlayerTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsExoPlayerTestCases
index 793d4d8..0d591ce 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsExoPlayerTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsExoPlayerTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsExoPlayerTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsExoPlayerTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsExoPlayerTestCases',
- test_name='cheets_GTS.7.0_r3.GtsExoPlayerTestCases',
+ tag='7.0_r4.GtsExoPlayerTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsExoPlayerTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsExoPlayerTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsExoPlayerTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=5400)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsFeaturesTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsFeaturesTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsFeaturesTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsFeaturesTestCases
index 3bf8b08..dc0c1d9 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsFeaturesTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsFeaturesTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsFeaturesTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsFeaturesTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsFeaturesTestCases',
- test_name='cheets_GTS.7.0_r3.GtsFeaturesTestCases',
+ tag='7.0_r4.GtsFeaturesTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsFeaturesTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsFeaturesTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsFeaturesTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsGameDeviceHostTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsGameDeviceHostTestCases
similarity index 85%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsGameDeviceHostTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsGameDeviceHostTestCases
index 1bb0aa9..0d148a2 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsGameDeviceHostTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsGameDeviceHostTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsGameDeviceHostTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsGameDeviceHostTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsGameDeviceHostTestCases',
- test_name='cheets_GTS.7.0_r3.GtsGameDeviceHostTestCases',
+ tag='7.0_r4.GtsGameDeviceHostTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsGameDeviceHostTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsGameDeviceHostTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsGameDeviceHostTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsGmscoreHostTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsGmscoreHostTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsGmscoreHostTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsGmscoreHostTestCases
index c64a7a4..a652ff1 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsGmscoreHostTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsGmscoreHostTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsGmscoreHostTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsGmscoreHostTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsGmscoreHostTestCases',
- test_name='cheets_GTS.7.0_r3.GtsGmscoreHostTestCases',
+ tag='7.0_r4.GtsGmscoreHostTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsGmscoreHostTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsGmscoreHostTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsGmscoreHostTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
prerequisites=['bluetooth'],
timeout=3600)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsGraphicsHostTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsGraphicsHostTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsGraphicsHostTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsGraphicsHostTestCases
index 2619dba..ab279e0 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsGraphicsHostTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsGraphicsHostTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsGraphicsHostTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsGraphicsHostTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsGraphicsHostTestCases',
- test_name='cheets_GTS.7.0_r3.GtsGraphicsHostTestCases',
+ tag='7.0_r4.GtsGraphicsHostTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsGraphicsHostTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsGraphicsHostTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsGraphicsHostTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsHomeHostTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsHomeHostTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsHomeHostTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsHomeHostTestCases
index 6aacb07..262c47d 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsHomeHostTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsHomeHostTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsHomeHostTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsHomeHostTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsHomeHostTestCases',
- test_name='cheets_GTS.7.0_r3.GtsHomeHostTestCases',
+ tag='7.0_r4.GtsHomeHostTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsHomeHostTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsHomeHostTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsHomeHostTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsIncident b/server/site_tests/cheets_GTS/control.7.0_r4.GtsIncident
similarity index 89%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsIncident
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsIncident
index 7da0a94..4403a81 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsIncident
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsIncident
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsIncident'
+NAME = 'cheets_GTS.7.0_r4.GtsIncident'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsIncident',
- test_name='cheets_GTS.7.0_r3.GtsIncident',
+ tag='7.0_r4.GtsIncident',
+ test_name='cheets_GTS.7.0_r4.GtsIncident',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsIncidentConfirmationTestCases', '--include-filter', 'GtsIncidentManagerTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsIncident',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=1080)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsInstallPackagesWhitelistDeviceTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsInstallPackagesWhitelistDeviceTestCases
similarity index 85%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsInstallPackagesWhitelistDeviceTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsInstallPackagesWhitelistDeviceTestCases
index 5c5e3d1..da05a2f 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsInstallPackagesWhitelistDeviceTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsInstallPackagesWhitelistDeviceTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsInstallPackagesWhitelistDeviceTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsInstallPackagesWhitelistDeviceTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsInstallPackagesWhitelistDeviceTestCases',
- test_name='cheets_GTS.7.0_r3.GtsInstallPackagesWhitelistDeviceTestCases',
+ tag='7.0_r4.GtsInstallPackagesWhitelistDeviceTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsInstallPackagesWhitelistDeviceTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsInstallPackagesWhitelistDeviceTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsInstallPackagesWhitelistDeviceTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsInstantAppsHostTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsInstantAppsHostTestCases
similarity index 85%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsInstantAppsHostTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsInstantAppsHostTestCases
index 4b695ad..d4ae01d 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsInstantAppsHostTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsInstantAppsHostTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsInstantAppsHostTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsInstantAppsHostTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsInstantAppsHostTestCases',
- test_name='cheets_GTS.7.0_r3.GtsInstantAppsHostTestCases',
+ tag='7.0_r4.GtsInstantAppsHostTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsInstantAppsHostTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsInstantAppsHostTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsInstantAppsHostTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsLargeApkHostTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsLargeApkHostTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsLargeApkHostTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsLargeApkHostTestCases
index 88f3c6c..c05e104 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsLargeApkHostTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsLargeApkHostTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsLargeApkHostTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsLargeApkHostTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsLargeApkHostTestCases',
- test_name='cheets_GTS.7.0_r3.GtsLargeApkHostTestCases',
+ tag='7.0_r4.GtsLargeApkHostTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsLargeApkHostTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsLargeApkHostTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsLargeApkHostTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsLauncherHostTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsLauncherHostTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsLauncherHostTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsLauncherHostTestCases
index 0b9fbc3..c8238d7 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsLauncherHostTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsLauncherHostTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsLauncherHostTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsLauncherHostTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsLauncherHostTestCases',
- test_name='cheets_GTS.7.0_r3.GtsLauncherHostTestCases',
+ tag='7.0_r4.GtsLauncherHostTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsLauncherHostTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsLauncherHostTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsLauncherHostTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsLocation b/server/site_tests/cheets_GTS/control.7.0_r4.GtsLocation
similarity index 89%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsLocation
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsLocation
index b81bca2..2bb93a0 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsLocation
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsLocation
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsLocation'
+NAME = 'cheets_GTS.7.0_r4.GtsLocation'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsLocation',
- test_name='cheets_GTS.7.0_r3.GtsLocation',
+ tag='7.0_r4.GtsLocation',
+ test_name='cheets_GTS.7.0_r4.GtsLocation',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsLocationHostTestCases', '--include-filter', 'GtsLocationTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsLocation',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=1080)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsMediaTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsMediaTestCases
similarity index 87%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsMediaTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsMediaTestCases
index 57820fe..9439471 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsMediaTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsMediaTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsMediaTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsMediaTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsMediaTestCases',
- test_name='cheets_GTS.7.0_r3.GtsMediaTestCases',
+ tag='7.0_r4.GtsMediaTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsMediaTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsMediaTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsMediaTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=14400)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsMemory b/server/site_tests/cheets_GTS/control.7.0_r4.GtsMemory
similarity index 89%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsMemory
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsMemory
index 49bad96..0fee9a0 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsMemory
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsMemory
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsMemory'
+NAME = 'cheets_GTS.7.0_r4.GtsMemory'
ATTRIBUTES = 'suite:arc-gts, suite:bvt-perbuild'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -21,14 +21,14 @@
hosts=host_list,
iterations=1,
max_retry=3,
- tag='7.0_r3.GtsMemory',
- test_name='cheets_GTS.7.0_r3.GtsMemory',
+ tag='7.0_r4.GtsMemory',
+ test_name='cheets_GTS.7.0_r4.GtsMemory',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsMemoryHostTestCases', '--include-filter', 'GtsMemoryTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsMemory',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=1080)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsModuleMetadataTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsModuleMetadataTestCases
similarity index 85%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsModuleMetadataTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsModuleMetadataTestCases
index 0624389..c939e76 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsModuleMetadataTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsModuleMetadataTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsModuleMetadataTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsModuleMetadataTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsModuleMetadataTestCases',
- test_name='cheets_GTS.7.0_r3.GtsModuleMetadataTestCases',
+ tag='7.0_r4.GtsModuleMetadataTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsModuleMetadataTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsModuleMetadataTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsModuleMetadataTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsNet b/server/site_tests/cheets_GTS/control.7.0_r4.GtsNet
similarity index 90%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsNet
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsNet
index 16ebd56..d48f59c 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsNet
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsNet
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsNet'
+NAME = 'cheets_GTS.7.0_r4.GtsNet'
ATTRIBUTES = 'suite:arc-gts, suite:bvt-perbuild'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -21,14 +21,14 @@
hosts=host_list,
iterations=1,
max_retry=3,
- tag='7.0_r3.GtsNet',
- test_name='cheets_GTS.7.0_r3.GtsNet',
+ tag='7.0_r4.GtsNet',
+ test_name='cheets_GTS.7.0_r4.GtsNet',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsNetStatsHostTestCases', '--include-filter', 'GtsNetTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsNet',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=1080)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsNetwork b/server/site_tests/cheets_GTS/control.7.0_r4.GtsNetwork
similarity index 89%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsNetwork
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsNetwork
index d125848..d612fcc 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsNetwork
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsNetwork
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsNetwork'
+NAME = 'cheets_GTS.7.0_r4.GtsNetwork'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsNetwork',
- test_name='cheets_GTS.7.0_r3.GtsNetwork',
+ tag='7.0_r4.GtsNetwork',
+ test_name='cheets_GTS.7.0_r4.GtsNetwork',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsNetworkStackHostTestCases', '--include-filter', 'GtsNetworkWatchlistTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsNetwork',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=1080)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsNmgiarcTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsNmgiarcTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsNmgiarcTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsNmgiarcTestCases
index 38403c7..e1af15a 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsNmgiarcTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsNmgiarcTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsNmgiarcTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsNmgiarcTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsNmgiarcTestCases',
- test_name='cheets_GTS.7.0_r3.GtsNmgiarcTestCases',
+ tag='7.0_r4.GtsNmgiarcTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsNmgiarcTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsNmgiarcTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsNmgiarcTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsNoPermission b/server/site_tests/cheets_GTS/control.7.0_r4.GtsNoPermission
similarity index 88%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsNoPermission
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsNoPermission
index 61fa455..45d523c 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsNoPermission
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsNoPermission
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsNoPermission'
+NAME = 'cheets_GTS.7.0_r4.GtsNoPermission'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsNoPermission',
- test_name='cheets_GTS.7.0_r3.GtsNoPermission',
+ tag='7.0_r4.GtsNoPermission',
+ test_name='cheets_GTS.7.0_r4.GtsNoPermission',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsNoPermissionTestCases', '--include-filter', 'GtsNoPermissionTestCases25', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsNoPermission',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=1080)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsNotificationTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsNotificationTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsNotificationTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsNotificationTestCases
index f4542d7..68ef2a5 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsNotificationTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsNotificationTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsNotificationTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsNotificationTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsNotificationTestCases',
- test_name='cheets_GTS.7.0_r3.GtsNotificationTestCases',
+ tag='7.0_r4.GtsNotificationTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsNotificationTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsNotificationTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsNotificationTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsOemLockServiceTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsOemLockServiceTestCases
similarity index 85%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsOemLockServiceTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsOemLockServiceTestCases
index 2e4ef57..c3184ff 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsOemLockServiceTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsOemLockServiceTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsOemLockServiceTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsOemLockServiceTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsOemLockServiceTestCases',
- test_name='cheets_GTS.7.0_r3.GtsOemLockServiceTestCases',
+ tag='7.0_r4.GtsOemLockServiceTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsOemLockServiceTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsOemLockServiceTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsOemLockServiceTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsOsTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsOsTestCases
similarity index 88%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsOsTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsOsTestCases
index 6acc239..e4bf140 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsOsTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsOsTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsOsTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsOsTestCases'
ATTRIBUTES = 'suite:arc-gts, suite:bvt-perbuild'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -21,14 +21,14 @@
hosts=host_list,
iterations=1,
max_retry=3,
- tag='7.0_r3.GtsOsTestCases',
- test_name='cheets_GTS.7.0_r3.GtsOsTestCases',
+ tag='7.0_r4.GtsOsTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsOsTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsOsTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsOsTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsPackage b/server/site_tests/cheets_GTS/control.7.0_r4.GtsPackage
similarity index 91%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsPackage
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsPackage
index 1516d58..81aa117 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsPackage
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsPackage
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsPackage'
+NAME = 'cheets_GTS.7.0_r4.GtsPackage'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsPackage',
- test_name='cheets_GTS.7.0_r3.GtsPackage',
+ tag='7.0_r4.GtsPackage',
+ test_name='cheets_GTS.7.0_r4.GtsPackage',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsPackageInstallTestCases', '--include-filter', 'GtsPackageInstallerTapjackingTestCases', '--include-filter', 'GtsPackageManagerHostTestCases', '--include-filter', 'GtsPackageNameCertPairsDeviceTestCases', '--include-filter', 'GtsPackageUninstallTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsPackage',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=2160)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsPartnerBookmarksTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsPartnerBookmarksTestCases
similarity index 85%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsPartnerBookmarksTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsPartnerBookmarksTestCases
index 7809cd5..de11e64 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsPartnerBookmarksTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsPartnerBookmarksTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsPartnerBookmarksTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsPartnerBookmarksTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsPartnerBookmarksTestCases',
- test_name='cheets_GTS.7.0_r3.GtsPartnerBookmarksTestCases',
+ tag='7.0_r4.GtsPartnerBookmarksTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsPartnerBookmarksTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPartnerBookmarksTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsPartnerBookmarksTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsLocation b/server/site_tests/cheets_GTS/control.7.0_r4.GtsPermission
similarity index 66%
copy from server/site_tests/cheets_GTS/control.7.0_r3.GtsLocation
copy to server/site_tests/cheets_GTS/control.7.0_r4.GtsPermission
index b81bca2..3219df9 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsLocation
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsPermission
@@ -5,14 +5,14 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsLocation'
+NAME = 'cheets_GTS.7.0_r4.GtsPermission'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
TEST_TYPE = 'server'
TIME = 'MEDIUM'
MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsLocationHostTestCases, GtsLocationTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
+DOC = 'Run module GtsPermissionControllerHostTestCases, GtsPermissionTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
def run_TS(machine):
host_list = [hosts.create_host(machine)]
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsLocation',
- test_name='cheets_GTS.7.0_r3.GtsLocation',
+ tag='7.0_r4.GtsPermission',
+ test_name='cheets_GTS.7.0_r4.GtsPermission',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
- run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsLocationHostTestCases', '--include-filter', 'GtsLocationTestCases', '--ignore-business-logic-failure'],
+ run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsPermissionControllerHostTestCases', '--include-filter', 'GtsPermissionTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
- target_module='GtsLocation',
+ target_module='GtsPermission',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=1080)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsPlacementTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsPlacementTestCases
similarity index 87%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsPlacementTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsPlacementTestCases
index 5a004c3..592e3e9 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsPlacementTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsPlacementTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsPlacementTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsPlacementTestCases'
ATTRIBUTES = 'suite:arc-gts, suite:bvt-perbuild'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -21,14 +21,14 @@
hosts=host_list,
iterations=1,
max_retry=3,
- tag='7.0_r3.GtsPlacementTestCases',
- test_name='cheets_GTS.7.0_r3.GtsPlacementTestCases',
+ tag='7.0_r4.GtsPlacementTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsPlacementTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPlacementTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsPlacementTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsPlay b/server/site_tests/cheets_GTS/control.7.0_r4.GtsPlay
similarity index 90%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsPlay
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsPlay
index 444128a..4d63fde 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsPlay
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsPlay
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsPlay'
+NAME = 'cheets_GTS.7.0_r4.GtsPlay'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsPlay',
- test_name='cheets_GTS.7.0_r3.GtsPlay',
+ tag='7.0_r4.GtsPlay',
+ test_name='cheets_GTS.7.0_r4.GtsPlay',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsPlayAutoInstallTestCases', '--include-filter', 'GtsPlayStoreHostTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsPlay',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=1080)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsPrintTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsPrintTestCases
similarity index 87%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsPrintTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsPrintTestCases
index 8539c8f..b562ab0 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsPrintTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsPrintTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsPrintTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsPrintTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsPrintTestCases',
- test_name='cheets_GTS.7.0_r3.GtsPrintTestCases',
+ tag='7.0_r4.GtsPrintTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsPrintTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPrintTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsPrintTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsPrivacyTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsPrivacyTestCases
similarity index 87%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsPrivacyTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsPrivacyTestCases
index 04faabd..e31694d 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsPrivacyTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsPrivacyTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsPrivacyTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsPrivacyTestCases'
ATTRIBUTES = 'suite:arc-gts, suite:bvt-perbuild'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -21,14 +21,14 @@
hosts=host_list,
iterations=1,
max_retry=3,
- tag='7.0_r3.GtsPrivacyTestCases',
- test_name='cheets_GTS.7.0_r3.GtsPrivacyTestCases',
+ tag='7.0_r4.GtsPrivacyTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsPrivacyTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPrivacyTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsPrivacyTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsPrivilegedUpdatePreparer b/server/site_tests/cheets_GTS/control.7.0_r4.GtsPrivilegedUpdatePreparer
similarity index 85%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsPrivilegedUpdatePreparer
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsPrivilegedUpdatePreparer
index de39a04..2f15af4 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsPrivilegedUpdatePreparer
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsPrivilegedUpdatePreparer
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsPrivilegedUpdatePreparer'
+NAME = 'cheets_GTS.7.0_r4.GtsPrivilegedUpdatePreparer'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsPrivilegedUpdatePreparer',
- test_name='cheets_GTS.7.0_r3.GtsPrivilegedUpdatePreparer',
+ tag='7.0_r4.GtsPrivilegedUpdatePreparer',
+ test_name='cheets_GTS.7.0_r4.GtsPrivilegedUpdatePreparer',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPrivilegedUpdatePreparer', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsPrivilegedUpdatePreparer',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsPropertiesTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsPropertiesTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsPropertiesTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsPropertiesTestCases
index e94f928..59431c5 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsPropertiesTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsPropertiesTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsPropertiesTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsPropertiesTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsPropertiesTestCases',
- test_name='cheets_GTS.7.0_r3.GtsPropertiesTestCases',
+ tag='7.0_r4.GtsPropertiesTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsPropertiesTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsPropertiesTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsPropertiesTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsRegulationComplianceTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsRegulationComplianceTestCases
similarity index 85%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsRegulationComplianceTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsRegulationComplianceTestCases
index 65d6f1f..44704cc 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsRegulationComplianceTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsRegulationComplianceTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsRegulationComplianceTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsRegulationComplianceTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsRegulationComplianceTestCases',
- test_name='cheets_GTS.7.0_r3.GtsRegulationComplianceTestCases',
+ tag='7.0_r4.GtsRegulationComplianceTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsRegulationComplianceTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsRegulationComplianceTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsRegulationComplianceTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsRlzTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsRlzTestCases
similarity index 87%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsRlzTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsRlzTestCases
index 4cf4681..a78e19d 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsRlzTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsRlzTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsRlzTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsRlzTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsRlzTestCases',
- test_name='cheets_GTS.7.0_r3.GtsRlzTestCases',
+ tag='7.0_r4.GtsRlzTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsRlzTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsRlzTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsRlzTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsSample b/server/site_tests/cheets_GTS/control.7.0_r4.GtsSample
similarity index 90%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsSample
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsSample
index 3f040cb..bc53788 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsSample
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsSample
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsSample'
+NAME = 'cheets_GTS.7.0_r4.GtsSample'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsSample',
- test_name='cheets_GTS.7.0_r3.GtsSample',
+ tag='7.0_r4.GtsSample',
+ test_name='cheets_GTS.7.0_r4.GtsSample',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsSampleDeviceTestCases', '--include-filter', 'GtsSampleDynamicConfigTestCases', '--include-filter', 'GtsSampleHostTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsSample',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=1440)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsScreenshotHostTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsScreenshotHostTestCases
similarity index 85%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsScreenshotHostTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsScreenshotHostTestCases
index 3589545..6985c0e 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsScreenshotHostTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsScreenshotHostTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsScreenshotHostTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsScreenshotHostTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsScreenshotHostTestCases',
- test_name='cheets_GTS.7.0_r3.GtsScreenshotHostTestCases',
+ tag='7.0_r4.GtsScreenshotHostTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsScreenshotHostTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsScreenshotHostTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsScreenshotHostTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsSearchHostTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsSearchHostTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsSearchHostTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsSearchHostTestCases
index 7344d82..1dd4753 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsSearchHostTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsSearchHostTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsSearchHostTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsSearchHostTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsSearchHostTestCases',
- test_name='cheets_GTS.7.0_r3.GtsSearchHostTestCases',
+ tag='7.0_r4.GtsSearchHostTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsSearchHostTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSearchHostTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsSearchHostTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsSecurity b/server/site_tests/cheets_GTS/control.7.0_r4.GtsSecurity
similarity index 89%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsSecurity
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsSecurity
index 60c18ef..1094d83 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsSecurity
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsSecurity
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsSecurity'
+NAME = 'cheets_GTS.7.0_r4.GtsSecurity'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsSecurity',
- test_name='cheets_GTS.7.0_r3.GtsSecurity',
+ tag='7.0_r4.GtsSecurity',
+ test_name='cheets_GTS.7.0_r4.GtsSecurity',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsSecurityHostTestCases', '--include-filter', 'GtsSecurityTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsSecurity',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=1080)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsSensorHostTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsSensorHostTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsSensorHostTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsSensorHostTestCases
index a4a46b0..7f5950a 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsSensorHostTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsSensorHostTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsSensorHostTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsSensorHostTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsSensorHostTestCases',
- test_name='cheets_GTS.7.0_r3.GtsSensorHostTestCases',
+ tag='7.0_r4.GtsSensorHostTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsSensorHostTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSensorHostTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsSensorHostTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsSettings b/server/site_tests/cheets_GTS/control.7.0_r4.GtsSettings
similarity index 89%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsSettings
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsSettings
index 18e5e44..b238a59 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsSettings
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsSettings
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsSettings'
+NAME = 'cheets_GTS.7.0_r4.GtsSettings'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsSettings',
- test_name='cheets_GTS.7.0_r3.GtsSettings',
+ tag='7.0_r4.GtsSettings',
+ test_name='cheets_GTS.7.0_r4.GtsSettings',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsSettingsHostTestCases', '--include-filter', 'GtsSettingsTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsSettings',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=1080)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsSetupWizard b/server/site_tests/cheets_GTS/control.7.0_r4.GtsSetupWizard
similarity index 88%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsSetupWizard
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsSetupWizard
index fb14bad..9b8eb39 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsSetupWizard
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsSetupWizard
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsSetupWizard'
+NAME = 'cheets_GTS.7.0_r4.GtsSetupWizard'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsSetupWizard',
- test_name='cheets_GTS.7.0_r3.GtsSetupWizard',
+ tag='7.0_r4.GtsSetupWizard',
+ test_name='cheets_GTS.7.0_r4.GtsSetupWizard',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsSetupWizardHostTestCases', '--include-filter', 'GtsSetupWizardNoPermissionTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsSetupWizard',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=1080)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsSimAppDialogTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsSimAppDialogTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsSimAppDialogTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsSimAppDialogTestCases
index 125a26d..9636915 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsSimAppDialogTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsSimAppDialogTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsSimAppDialogTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsSimAppDialogTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsSimAppDialogTestCases',
- test_name='cheets_GTS.7.0_r3.GtsSimAppDialogTestCases',
+ tag='7.0_r4.GtsSimAppDialogTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsSimAppDialogTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSimAppDialogTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsSimAppDialogTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsSmartBatteryDeviceTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsSmartBatteryDeviceTestCases
similarity index 85%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsSmartBatteryDeviceTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsSmartBatteryDeviceTestCases
index dea28c6..609f1c0 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsSmartBatteryDeviceTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsSmartBatteryDeviceTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsSmartBatteryDeviceTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsSmartBatteryDeviceTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsSmartBatteryDeviceTestCases',
- test_name='cheets_GTS.7.0_r3.GtsSmartBatteryDeviceTestCases',
+ tag='7.0_r4.GtsSmartBatteryDeviceTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsSmartBatteryDeviceTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSmartBatteryDeviceTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsSmartBatteryDeviceTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsSsaidHostTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsSsaidHostTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsSsaidHostTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsSsaidHostTestCases
index f00847e..d09d30f 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsSsaidHostTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsSsaidHostTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsSsaidHostTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsSsaidHostTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsSsaidHostTestCases',
- test_name='cheets_GTS.7.0_r3.GtsSsaidHostTestCases',
+ tag='7.0_r4.GtsSsaidHostTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsSsaidHostTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSsaidHostTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsSsaidHostTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsStagedInstallHostTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsStagedInstallHostTestCases
similarity index 85%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsStagedInstallHostTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsStagedInstallHostTestCases
index 7cc3182..dd1a337 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsStagedInstallHostTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsStagedInstallHostTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsStagedInstallHostTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsStagedInstallHostTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsStagedInstallHostTestCases',
- test_name='cheets_GTS.7.0_r3.GtsStagedInstallHostTestCases',
+ tag='7.0_r4.GtsStagedInstallHostTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsStagedInstallHostTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsStagedInstallHostTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsStagedInstallHostTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsStatsdHostTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsStatsdHostTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsStatsdHostTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsStatsdHostTestCases
index 30013d9..382107f 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsStatsdHostTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsStatsdHostTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsStatsdHostTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsStatsdHostTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsStatsdHostTestCases',
- test_name='cheets_GTS.7.0_r3.GtsStatsdHostTestCases',
+ tag='7.0_r4.GtsStatsdHostTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsStatsdHostTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsStatsdHostTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsStatsdHostTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsStorageTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsStorageTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsStorageTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsStorageTestCases
index 6ada8b4..425b39a 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsStorageTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsStorageTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsStorageTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsStorageTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsStorageTestCases',
- test_name='cheets_GTS.7.0_r3.GtsStorageTestCases',
+ tag='7.0_r4.GtsStorageTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsStorageTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsStorageTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsStorageTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsSupervisionTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsSupervisionTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsSupervisionTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsSupervisionTestCases
index 88f4136..1941d30 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsSupervisionTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsSupervisionTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsSupervisionTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsSupervisionTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsSupervisionTestCases',
- test_name='cheets_GTS.7.0_r3.GtsSupervisionTestCases',
+ tag='7.0_r4.GtsSupervisionTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsSupervisionTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsSupervisionTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsSupervisionTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsSuspendApps b/server/site_tests/cheets_GTS/control.7.0_r4.GtsSuspendApps
similarity index 88%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsSuspendApps
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsSuspendApps
index 70b31cd..03b2a94 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsSuspendApps
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsSuspendApps
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsSuspendApps'
+NAME = 'cheets_GTS.7.0_r4.GtsSuspendApps'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsSuspendApps',
- test_name='cheets_GTS.7.0_r3.GtsSuspendApps',
+ tag='7.0_r4.GtsSuspendApps',
+ test_name='cheets_GTS.7.0_r4.GtsSuspendApps',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsSuspendAppsPermissionTestCases', '--include-filter', 'GtsSuspendAppsTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsSuspendApps',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=1080)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsTelecomManagerTests b/server/site_tests/cheets_GTS/control.7.0_r4.GtsTelecomManagerTests
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsTelecomManagerTests
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsTelecomManagerTests
index bc79a95..11a32b2 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsTelecomManagerTests
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsTelecomManagerTests
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsTelecomManagerTests'
+NAME = 'cheets_GTS.7.0_r4.GtsTelecomManagerTests'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsTelecomManagerTests',
- test_name='cheets_GTS.7.0_r3.GtsTelecomManagerTests',
+ tag='7.0_r4.GtsTelecomManagerTests',
+ test_name='cheets_GTS.7.0_r4.GtsTelecomManagerTests',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsTelecomManagerTests', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsTelecomManagerTests',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsTelephonyTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsTelephonyTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsTelephonyTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsTelephonyTestCases
index 05c76a5..2d751fc 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsTelephonyTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsTelephonyTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsTelephonyTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsTelephonyTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsTelephonyTestCases',
- test_name='cheets_GTS.7.0_r3.GtsTelephonyTestCases',
+ tag='7.0_r4.GtsTelephonyTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsTelephonyTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsTelephonyTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsTelephonyTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsTestHarnessModeTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsTestHarnessModeTestCases
similarity index 85%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsTestHarnessModeTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsTestHarnessModeTestCases
index 01996cb..0ef166a 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsTestHarnessModeTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsTestHarnessModeTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsTestHarnessModeTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsTestHarnessModeTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsTestHarnessModeTestCases',
- test_name='cheets_GTS.7.0_r3.GtsTestHarnessModeTestCases',
+ tag='7.0_r4.GtsTestHarnessModeTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsTestHarnessModeTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsTestHarnessModeTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsTestHarnessModeTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsTetheringTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsTetheringTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsTetheringTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsTetheringTestCases
index ddc59ad..56e186c 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsTetheringTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsTetheringTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsTetheringTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsTetheringTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsTetheringTestCases',
- test_name='cheets_GTS.7.0_r3.GtsTetheringTestCases',
+ tag='7.0_r4.GtsTetheringTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsTetheringTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsTetheringTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsTetheringTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsTv b/server/site_tests/cheets_GTS/control.7.0_r4.GtsTv
similarity index 90%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsTv
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsTv
index 312c2d9..48e7488 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsTv
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsTv
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsTv'
+NAME = 'cheets_GTS.7.0_r4.GtsTv'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsTv',
- test_name='cheets_GTS.7.0_r3.GtsTv',
+ tag='7.0_r4.GtsTv',
+ test_name='cheets_GTS.7.0_r4.GtsTv',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsTvBugReportTestCases', '--include-filter', 'GtsTvHostTestCases', '--include-filter', 'GtsTvTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsTv',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=1440)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsUnofficialApisUsageTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsUnofficialApisUsageTestCases
similarity index 85%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsUnofficialApisUsageTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsUnofficialApisUsageTestCases
index 92e9b91..d20a113 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsUnofficialApisUsageTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsUnofficialApisUsageTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsUnofficialApisUsageTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsUnofficialApisUsageTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsUnofficialApisUsageTestCases',
- test_name='cheets_GTS.7.0_r3.GtsUnofficialApisUsageTestCases',
+ tag='7.0_r4.GtsUnofficialApisUsageTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsUnofficialApisUsageTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsUnofficialApisUsageTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsUnofficialApisUsageTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsUsageStatsTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsUsageStatsTestCases
similarity index 86%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsUsageStatsTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsUsageStatsTestCases
index 18135d2..1b077be 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsUsageStatsTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsUsageStatsTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsUsageStatsTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsUsageStatsTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsUsageStatsTestCases',
- test_name='cheets_GTS.7.0_r3.GtsUsageStatsTestCases',
+ tag='7.0_r4.GtsUsageStatsTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsUsageStatsTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsUsageStatsTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsUsageStatsTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsViewTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsViewTestCases
similarity index 87%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsViewTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsViewTestCases
index b1fb90a..5c212af 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsViewTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsViewTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsViewTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsViewTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsViewTestCases',
- test_name='cheets_GTS.7.0_r3.GtsViewTestCases',
+ tag='7.0_r4.GtsViewTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsViewTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsViewTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsViewTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsWebView b/server/site_tests/cheets_GTS/control.7.0_r4.GtsWebView
similarity index 89%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsWebView
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsWebView
index 8bc9bf1..bc47d5b 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsWebView
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsWebView
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsWebView'
+NAME = 'cheets_GTS.7.0_r4.GtsWebView'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsWebView',
- test_name='cheets_GTS.7.0_r3.GtsWebView',
+ tag='7.0_r4.GtsWebView',
+ test_name='cheets_GTS.7.0_r4.GtsWebView',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsWebViewHostTestCases', '--include-filter', 'GtsWebViewTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsWebView',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=1080)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsWellbeing b/server/site_tests/cheets_GTS/control.7.0_r4.GtsWellbeing
similarity index 89%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsWellbeing
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsWellbeing
index 5e2216b..6904a1e 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsWellbeing
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsWellbeing
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsWellbeing'
+NAME = 'cheets_GTS.7.0_r4.GtsWellbeing'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsWellbeing',
- test_name='cheets_GTS.7.0_r3.GtsWellbeing',
+ tag='7.0_r4.GtsWellbeing',
+ test_name='cheets_GTS.7.0_r4.GtsWellbeing',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsWellbeingHostTestCases', '--include-filter', 'GtsWellbeingPermissionPolicyTestCases', '--include-filter', 'GtsWellbeingTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsWellbeing',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=1440)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsYouTubeTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.GtsYouTubeTestCases
similarity index 87%
rename from server/site_tests/cheets_GTS/control.7.0_r3.GtsYouTubeTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.GtsYouTubeTestCases
index c4c09ad..c28ecfbb 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsYouTubeTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.GtsYouTubeTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsYouTubeTestCases'
+NAME = 'cheets_GTS.7.0_r4.GtsYouTubeTestCases'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -21,14 +21,14 @@
hosts=host_list,
iterations=1,
needs_push_media=True,
- tag='7.0_r3.GtsYouTubeTestCases',
- test_name='cheets_GTS.7.0_r3.GtsYouTubeTestCases',
+ tag='7.0_r4.GtsYouTubeTestCases',
+ test_name='cheets_GTS.7.0_r4.GtsYouTubeTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsYouTubeTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='GtsYouTubeTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=3600)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.GtsAssistantHostTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.ReviewPermissionHelperGts
similarity index 72%
copy from server/site_tests/cheets_GTS/control.7.0_r3.GtsAssistantHostTestCases
copy to server/site_tests/cheets_GTS/control.7.0_r4.ReviewPermissionHelperGts
index 91ce467..aa9a70f 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.GtsAssistantHostTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.ReviewPermissionHelperGts
@@ -5,14 +5,14 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.GtsAssistantHostTestCases'
+NAME = 'cheets_GTS.7.0_r4.ReviewPermissionHelperGts'
ATTRIBUTES = 'suite:arc-gts'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
TEST_TYPE = 'server'
TIME = 'MEDIUM'
MAX_RESULT_SIZE_KB = 307200
-DOC = 'Run module GtsAssistantHostTestCases of the Android Google Test Suite (GTS) in the ARC++ container.'
+DOC = 'Run module ReviewPermissionHelperGts of the Android Google Test Suite (GTS) in the ARC++ container.'
def run_TS(machine):
host_list = [hosts.create_host(machine)]
@@ -20,14 +20,14 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- tag='7.0_r3.GtsAssistantHostTestCases',
- test_name='cheets_GTS.7.0_r3.GtsAssistantHostTestCases',
+ tag='7.0_r4.ReviewPermissionHelperGts',
+ test_name='cheets_GTS.7.0_r4.ReviewPermissionHelperGts',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
- run_template=['run', 'commandAndExit', 'gts', '--module', 'GtsAssistantHostTestCases', '--ignore-business-logic-failure'],
+ run_template=['run', 'commandAndExit', 'gts', '--module', 'ReviewPermissionHelperGts', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
- target_module='GtsAssistantHostTestCases',
+ target_module='ReviewPermissionHelperGts',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=720)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.all.CtsCheckpointTestCases_-_GtsCallLogTestCases b/server/site_tests/cheets_GTS/control.7.0_r4.all.CtsCheckpointTestCases_-_GtsCallLogTestCases
similarity index 91%
rename from server/site_tests/cheets_GTS/control.7.0_r3.all.CtsCheckpointTestCases_-_GtsCallLogTestCases
rename to server/site_tests/cheets_GTS/control.7.0_r4.all.CtsCheckpointTestCases_-_GtsCallLogTestCases
index 466e9ca..583d92b 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.all.CtsCheckpointTestCases_-_GtsCallLogTestCases
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.all.CtsCheckpointTestCases_-_GtsCallLogTestCases
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.all.CtsCheckpointTestCases_-_GtsCallLogTestCases'
+NAME = 'cheets_GTS.7.0_r4.all.CtsCheckpointTestCases_-_GtsCallLogTestCases'
ATTRIBUTES = 'suite:arc-gts-qual'
DEPENDENCIES = 'arc'
JOB_RETRIES = 1
@@ -21,14 +21,14 @@
hosts=host_list,
iterations=1,
max_retry=9,
- tag='7.0_r3.all.CtsCheckpointTestCases_-_GtsCallLogTestCases',
- test_name='cheets_GTS.7.0_r3.all.CtsCheckpointTestCases_-_GtsCallLogTestCases',
+ tag='7.0_r4.all.CtsCheckpointTestCases_-_GtsCallLogTestCases',
+ test_name='cheets_GTS.7.0_r4.all.CtsCheckpointTestCases_-_GtsCallLogTestCases',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'CtsCheckpointTestCases', '--include-filter', 'GtsAccountsHostTestCases', '--include-filter', 'GtsAdminTestCases', '--include-filter', 'GtsAfwTestCases', '--include-filter', 'GtsAndroidAutoDeviceTestCases', '--include-filter', 'GtsAppBlacklistDeviceTestCases', '--include-filter', 'GtsAppTestCases', '--include-filter', 'GtsArtManagerHostTestCases', '--include-filter', 'GtsAssistIntentTestCases', '--include-filter', 'GtsAssistantHostTestCases', '--include-filter', 'GtsBackupHostTestCases', '--include-filter', 'GtsBackupTestCases', '--include-filter', 'GtsBootStatsTestCases', '--include-filter', 'GtsCallLogTestCases', '--ignore-business-logic-failure'],
retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
target_module='all.CtsCheckpointTestCases_-_GtsCallLogTestCases',
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=86400)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.7.0_r4.all.GtsCameraTestCases_-_ReviewPermissionHelperGts b/server/site_tests/cheets_GTS/control.7.0_r4.all.GtsCameraTestCases_-_ReviewPermissionHelperGts
new file mode 100644
index 0000000..d701f9f
--- /dev/null
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.all.GtsCameraTestCases_-_ReviewPermissionHelperGts
@@ -0,0 +1,36 @@
+# Copyright 2016 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file has been automatically generated. Do not edit!
+
+AUTHOR = 'ARC++ Team'
+NAME = 'cheets_GTS.7.0_r4.all.GtsCameraTestCases_-_ReviewPermissionHelperGts'
+ATTRIBUTES = 'suite:arc-gts-qual'
+DEPENDENCIES = 'arc'
+JOB_RETRIES = 1
+TEST_TYPE = 'server'
+TIME = 'LONG'
+MAX_RESULT_SIZE_KB = 307200
+DOC = 'Run module GtsCameraTestCases, GtsCastHostTestCases, GtsContactsAppDeviceTestCases, GtsContactsTest, GtsContentTestCases, GtsDeviceConfigTestCases, GtsDexModuleRegistrationTestCases, GtsDialerDeviceTestCases, GtsDozeDeviceTestCases, GtsEdiHostTestCases, GtsExoPlayerTestCases, GtsFeaturesTestCases, GtsGameDeviceHostTestCases, GtsGmscoreHostTestCases, GtsGraphicsHostTestCases, GtsHomeHostTestCases, GtsIncidentConfirmationTestCases, GtsIncidentManagerTestCases, GtsInstallPackagesWhitelistDeviceTestCases, GtsInstantAppsHostTestCases, GtsLargeApkHostTestCases, GtsLauncherHostTestCases, GtsLocationHostTestCases, GtsLocationTestCases, GtsMediaTestCases, GtsMemoryHostTestCases, GtsMemoryTestCases, GtsModuleMetadataTestCases, GtsNetStatsHostTestCases, GtsNetTestCases, GtsNetworkStackHostTestCases, GtsNetworkWatchlistTestCases, GtsNmgiarcTestCases, GtsNoPermissionTestCases, GtsNoPermissionTestCases25, GtsNotificationTestCases, GtsOemLockServiceTestCases, GtsOsTestCases, GtsPackageInstallTestCases, GtsPackageInstallerTapjackingTestCases, GtsPackageManagerHostTestCases, GtsPackageNameCertPairsDeviceTestCases, GtsPackageUninstallTestCases, GtsPartnerBookmarksTestCases, GtsPermissionControllerHostTestCases, GtsPermissionTestCases, GtsPlacementTestCases, GtsPlayAutoInstallTestCases, GtsPlayStoreHostTestCases, GtsPrintTestCases, GtsPrivacyTestCases, GtsPrivilegedUpdatePreparer, GtsPropertiesTestCases, GtsRegulationComplianceTestCases, GtsRlzTestCases, GtsSampleDeviceTestCases, GtsSampleDynamicConfigTestCases, GtsSampleHostTestCases, GtsScreenshotHostTestCases, GtsSearchHostTestCases, GtsSecurityHostTestCases, GtsSecurityTestCases, GtsSensorHostTestCases, GtsSettingsHostTestCases, GtsSettingsTestCases, GtsSetupWizardHostTestCases, GtsSetupWizardNoPermissionTestCases, GtsSimAppDialogTestCases, GtsSmartBatteryDeviceTestCases, GtsSsaidHostTestCases, GtsStagedInstallHostTestCases, GtsStatsdHostTestCases, GtsStorageTestCases, GtsSupervisionTestCases, GtsSuspendAppsPermissionTestCases, GtsSuspendAppsTestCases, GtsTelecomManagerTests, GtsTelephonyTestCases, GtsTestHarnessModeTestCases, GtsTetheringTestCases, GtsTvBugReportTestCases, GtsTvHostTestCases, GtsTvTestCases, GtsUnofficialApisUsageTestCases, GtsUsageStatsTestCases, GtsViewTestCases, GtsWebViewHostTestCases, GtsWebViewTestCases, GtsWellbeingHostTestCases, GtsWellbeingPermissionPolicyTestCases, GtsWellbeingTestCases, GtsYouTubeTestCases, ReviewPermissionHelperGts of the Android Google Test Suite (GTS) in the ARC++ container.'
+
+def run_TS(machine):
+ host_list = [hosts.create_host(machine)]
+ job.run_test(
+ 'cheets_GTS',
+ hosts=host_list,
+ iterations=1,
+ max_retry=9,
+ needs_push_media=True,
+ tag='7.0_r4.all.GtsCameraTestCases_-_ReviewPermissionHelperGts',
+ test_name='cheets_GTS.7.0_r4.all.GtsCameraTestCases_-_ReviewPermissionHelperGts',
+ authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
+ run_template=['run', 'commandAndExit', 'gts', '--include-filter', 'GtsCameraTestCases', '--include-filter', 'GtsCastHostTestCases', '--include-filter', 'GtsContactsAppDeviceTestCases', '--include-filter', 'GtsContactsTest', '--include-filter', 'GtsContentTestCases', '--include-filter', 'GtsDeviceConfigTestCases', '--include-filter', 'GtsDexModuleRegistrationTestCases', '--include-filter', 'GtsDialerDeviceTestCases', '--include-filter', 'GtsDozeDeviceTestCases', '--include-filter', 'GtsEdiHostTestCases', '--include-filter', 'GtsExoPlayerTestCases', '--include-filter', 'GtsFeaturesTestCases', '--include-filter', 'GtsGameDeviceHostTestCases', '--include-filter', 'GtsGmscoreHostTestCases', '--include-filter', 'GtsGraphicsHostTestCases', '--include-filter', 'GtsHomeHostTestCases', '--include-filter', 'GtsIncidentConfirmationTestCases', '--include-filter', 'GtsIncidentManagerTestCases', '--include-filter', 'GtsInstallPackagesWhitelistDeviceTestCases', '--include-filter', 'GtsInstantAppsHostTestCases', '--include-filter', 'GtsLargeApkHostTestCases', '--include-filter', 'GtsLauncherHostTestCases', '--include-filter', 'GtsLocationHostTestCases', '--include-filter', 'GtsLocationTestCases', '--include-filter', 'GtsMediaTestCases', '--include-filter', 'GtsMemoryHostTestCases', '--include-filter', 'GtsMemoryTestCases', '--include-filter', 'GtsModuleMetadataTestCases', '--include-filter', 'GtsNetStatsHostTestCases', '--include-filter', 'GtsNetTestCases', '--include-filter', 'GtsNetworkStackHostTestCases', '--include-filter', 'GtsNetworkWatchlistTestCases', '--include-filter', 'GtsNmgiarcTestCases', '--include-filter', 'GtsNoPermissionTestCases', '--include-filter', 'GtsNoPermissionTestCases25', '--include-filter', 'GtsNotificationTestCases', '--include-filter', 'GtsOemLockServiceTestCases', '--include-filter', 'GtsOsTestCases', '--include-filter', 'GtsPackageInstallTestCases', '--include-filter', 'GtsPackageInstallerTapjackingTestCases', '--include-filter', 'GtsPackageManagerHostTestCases', '--include-filter', 'GtsPackageNameCertPairsDeviceTestCases', '--include-filter', 'GtsPackageUninstallTestCases', '--include-filter', 'GtsPartnerBookmarksTestCases', '--include-filter', 'GtsPermissionControllerHostTestCases', '--include-filter', 'GtsPermissionTestCases', '--include-filter', 'GtsPlacementTestCases', '--include-filter', 'GtsPlayAutoInstallTestCases', '--include-filter', 'GtsPlayStoreHostTestCases', '--include-filter', 'GtsPrintTestCases', '--include-filter', 'GtsPrivacyTestCases', '--include-filter', 'GtsPrivilegedUpdatePreparer', '--include-filter', 'GtsPropertiesTestCases', '--include-filter', 'GtsRegulationComplianceTestCases', '--include-filter', 'GtsRlzTestCases', '--include-filter', 'GtsSampleDeviceTestCases', '--include-filter', 'GtsSampleDynamicConfigTestCases', '--include-filter', 'GtsSampleHostTestCases', '--include-filter', 'GtsScreenshotHostTestCases', '--include-filter', 'GtsSearchHostTestCases', '--include-filter', 'GtsSecurityHostTestCases', '--include-filter', 'GtsSecurityTestCases', '--include-filter', 'GtsSensorHostTestCases', '--include-filter', 'GtsSettingsHostTestCases', '--include-filter', 'GtsSettingsTestCases', '--include-filter', 'GtsSetupWizardHostTestCases', '--include-filter', 'GtsSetupWizardNoPermissionTestCases', '--include-filter', 'GtsSimAppDialogTestCases', '--include-filter', 'GtsSmartBatteryDeviceTestCases', '--include-filter', 'GtsSsaidHostTestCases', '--include-filter', 'GtsStagedInstallHostTestCases', '--include-filter', 'GtsStatsdHostTestCases', '--include-filter', 'GtsStorageTestCases', '--include-filter', 'GtsSupervisionTestCases', '--include-filter', 'GtsSuspendAppsPermissionTestCases', '--include-filter', 'GtsSuspendAppsTestCases', '--include-filter', 'GtsTelecomManagerTests', '--include-filter', 'GtsTelephonyTestCases', '--include-filter', 'GtsTestHarnessModeTestCases', '--include-filter', 'GtsTetheringTestCases', '--include-filter', 'GtsTvBugReportTestCases', '--include-filter', 'GtsTvHostTestCases', '--include-filter', 'GtsTvTestCases', '--include-filter', 'GtsUnofficialApisUsageTestCases', '--include-filter', 'GtsUsageStatsTestCases', '--include-filter', 'GtsViewTestCases', '--include-filter', 'GtsWebViewHostTestCases', '--include-filter', 'GtsWebViewTestCases', '--include-filter', 'GtsWellbeingHostTestCases', '--include-filter', 'GtsWellbeingPermissionPolicyTestCases', '--include-filter', 'GtsWellbeingTestCases', '--include-filter', 'GtsYouTubeTestCases', '--include-filter', 'ReviewPermissionHelperGts', '--ignore-business-logic-failure'],
+ retry_template=['run', 'commandAndExit', 'retry', '--retry', '{session_id}'],
+ target_module='all.GtsCameraTestCases_-_ReviewPermissionHelperGts',
+ target_plan=None,
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
+ prerequisites=['bluetooth'],
+ timeout=86400)
+
+parallel_simple(run_TS, machines)
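The new combined control file above repeats '--include-filter' for every module in its run_template. A minimal sketch of how such a template can be assembled from a module list follows; the helper name build_gts_run_template and the two-module example are illustrative and not part of the generated control files.

def build_gts_run_template(modules, ignore_business_logic_failure=True):
    """Return a tradefed run template that includes the given GTS modules."""
    template = ['run', 'commandAndExit', 'gts']
    for module in modules:
        # Each module becomes its own '--include-filter <module>' pair.
        template += ['--include-filter', module]
    if ignore_business_logic_failure:
        template.append('--ignore-business-logic-failure')
    return template

# Example: a two-module template, analogous to the combined control above.
print(build_gts_run_template(['GtsCameraTestCases', 'ReviewPermissionHelperGts']))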
diff --git a/server/site_tests/cheets_GTS/control.7.0_r3.tradefed-run-collect-tests-only-internal b/server/site_tests/cheets_GTS/control.7.0_r4.tradefed-run-collect-tests-only-internal
similarity index 84%
rename from server/site_tests/cheets_GTS/control.7.0_r3.tradefed-run-collect-tests-only-internal
rename to server/site_tests/cheets_GTS/control.7.0_r4.tradefed-run-collect-tests-only-internal
index c7b2797..e4047ab 100644
--- a/server/site_tests/cheets_GTS/control.7.0_r3.tradefed-run-collect-tests-only-internal
+++ b/server/site_tests/cheets_GTS/control.7.0_r4.tradefed-run-collect-tests-only-internal
@@ -5,7 +5,7 @@
# This file has been automatically generated. Do not edit!
AUTHOR = 'ARC++ Team'
-NAME = 'cheets_GTS.7.0_r3.tradefed-run-collect-tests-only-internal'
+NAME = 'cheets_GTS.7.0_r4.tradefed-run-collect-tests-only-internal'
ATTRIBUTES = 'suite:arc-gts, suite:arc-gts-qual'
DEPENDENCIES = 'arc'
JOB_RETRIES = 0
@@ -21,14 +21,14 @@
hosts=host_list,
iterations=1,
max_retry=0,
- tag='7.0_r3.tradefed-run-collect-tests-only-internal',
- test_name='cheets_GTS.7.0_r3.tradefed-run-collect-tests-only-internal',
+ tag='7.0_r4.tradefed-run-collect-tests-only-internal',
+ test_name='cheets_GTS.7.0_r4.tradefed-run-collect-tests-only-internal',
authkey='gs://chromeos-arc-images/cts/bundle/gts-arc.json',
run_template=['run', 'commandAndExit', 'collect-tests-only', '--disable-reboot', '--module-arg', 'GtsYouTubeTestCases:skip-media-download:true'],
retry_template=None,
target_module=None,
target_plan=None,
- uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r3-6045416.zip',
+ uri='gs://chromeos-arc-images/cts/bundle/android-gts-7_r4-6219464.zip',
timeout=1800)
parallel_simple(run_TS, machines)
diff --git a/server/site_tests/cheets_GTS/control.GtsYouTubeTestCases b/server/site_tests/cheets_GTS/control.GtsYouTubeTestCases
index d9c91bb..0f38df3 100644
--- a/server/site_tests/cheets_GTS/control.GtsYouTubeTestCases
+++ b/server/site_tests/cheets_GTS/control.GtsYouTubeTestCases
@@ -20,7 +20,7 @@
'cheets_GTS',
hosts=host_list,
iterations=1,
- max_retry=2,
+ max_retry=5,
needs_push_media=True,
tag='GtsYouTubeTestCases',
test_name='cheets_GTS.GtsYouTubeTestCases',
diff --git a/server/site_tests/cheets_GTS/control.tradefed-run-test b/server/site_tests/cheets_GTS/control.tradefed-run-test
index 2c05b8c..e541f44 100644
--- a/server/site_tests/cheets_GTS/control.tradefed-run-test
+++ b/server/site_tests/cheets_GTS/control.tradefed-run-test
@@ -29,7 +29,7 @@
# Define the variables that we are going to use and set sensible defaults.
gts_module = ''
gts_retry = 5
-gts_revision = '7_r3-6045416' # TODO(ihf): Set this default value from generator.
+gts_revision = '7_r4-6219464' # TODO(ihf): Set this default value from generator.
gts_test = ''
gts_timeout = 600
diff --git a/server/site_tests/firmware_Cr50DeferredECReset/firmware_Cr50DeferredECReset.py b/server/site_tests/firmware_Cr50DeferredECReset/firmware_Cr50DeferredECReset.py
index 60606d8..6a0eb70 100644
--- a/server/site_tests/firmware_Cr50DeferredECReset/firmware_Cr50DeferredECReset.py
+++ b/server/site_tests/firmware_Cr50DeferredECReset/firmware_Cr50DeferredECReset.py
@@ -178,9 +178,11 @@
'' if expect_response else 'not ')
rv = self.ec.send_command_get_output('help', ['.*>'])[0].strip()
except error.TestFail as e:
- logging.info(str(e))
- if 'Timeout waiting for response' in str(e):
- if not expect_response:
+ msg = str(e)
+ logging.info(msg)
+ if not expect_response:
+ if ('Timeout waiting for response' in msg or
+ 'No data was sent from the pty' in msg):
return
raise e
else:
diff --git a/server/site_tests/firmware_Cr50DeviceState/firmware_Cr50DeviceState.py b/server/site_tests/firmware_Cr50DeviceState/firmware_Cr50DeviceState.py
index c3d7ed2..86c4421 100644
--- a/server/site_tests/firmware_Cr50DeviceState/firmware_Cr50DeviceState.py
+++ b/server/site_tests/firmware_Cr50DeviceState/firmware_Cr50DeviceState.py
@@ -103,6 +103,13 @@
INCREASE = '+'
DS_RESUME = 'DS'
+ MEM_SLEEP_PATH = '/sys/power/mem_sleep'
+ MEM_SLEEP_S0IX = 'echo %s > %s ; sleep 1' % ('s2idle', MEM_SLEEP_PATH)
+ MEM_SLEEP_S3 = 'echo %s > %s ; sleep 1' % ('deep', MEM_SLEEP_PATH)
+ POWER_STATE_PATH = '/sys/power/state'
+ POWER_STATE_S0IX = 'echo %s > %s &' % ('freeze', POWER_STATE_PATH)
+ POWER_STATE_S3 = 'echo %s > %s &' % ('mem', POWER_STATE_PATH)
+
def initialize(self, host, cmdline_args, full_args):
super(firmware_Cr50DeviceState, self).initialize(host, cmdline_args,
@@ -111,6 +118,23 @@
if not self.check_ec_capability():
raise error.TestNAError("Nothing needs to be tested on this device")
+ self.generate_suspend_commands()
+
+
+ def generate_suspend_commands(self):
+ """Generate the S3 and S0ix suspend commands"""
+ s0ix_cmds = []
+ s3_cmds = []
+ if self.host.path_exists(self.MEM_SLEEP_PATH):
+ s0ix_cmds.append(self.MEM_SLEEP_S0IX)
+ s3_cmds.append(self.MEM_SLEEP_S3)
+ s0ix_cmds.append(self.POWER_STATE_S0IX)
+ s3_cmds.append(self.POWER_STATE_S3)
+ self._s0ix_cmds = '; '.join(s0ix_cmds)
+ self._s3_cmds = '; '.join(s3_cmds)
+ logging.info('S0ix cmd: %r', self._s0ix_cmds)
+ logging.info('S3 cmd: %r', self._s3_cmds)
+
def log_sleep_debug_information(self):
"""Log some information used for debugging sleep issues"""
@@ -335,7 +359,9 @@
def ap_is_on_after_power_button_press(self):
"""Returns True if the AP is on after pressing the power button"""
- self.servo.power_short_press()
+ # TODO (mruthven): use self.servo.power_short_press() once kukui power
+ # button issues are figured out.
+ self.servo.power_key(1)
# Give the AP some time to turn on
time.sleep(self.cr50.SHORT_WAIT)
return self.cr50.ap_is_on()
@@ -358,9 +384,9 @@
self.trigger_s0()
else:
if state == 'S0ix':
- full_command = 'echo freeze > /sys/power/state &'
+ full_command = self._s0ix_cmds
elif state == 'S3':
- full_command = 'echo mem > /sys/power/state &'
+ full_command = self._s3_cmds
elif state == 'G3':
full_command = 'poweroff'
self.faft_client.system.run_shell_command(full_command)
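The hunks above build the suspend commands once during initialize(): when the DUT exposes /sys/power/mem_sleep, the sleep depth (s2idle or deep) is written there before the suspend is triggered through /sys/power/state. A minimal standalone sketch of that construction, assuming a has_mem_sleep flag in place of the host path check; the function name build_suspend_command is illustrative, not part of the test.

MEM_SLEEP_PATH = '/sys/power/mem_sleep'
POWER_STATE_PATH = '/sys/power/state'

def build_suspend_command(state, has_mem_sleep):
    """Return the shell command used to suspend to 'S0ix' or 'S3'."""
    mem_sleep_mode = 's2idle' if state == 'S0ix' else 'deep'
    power_state = 'freeze' if state == 'S0ix' else 'mem'
    cmds = []
    if has_mem_sleep:
        # Select the sleep depth first, mirroring MEM_SLEEP_S0IX / MEM_SLEEP_S3.
        cmds.append('echo %s > %s ; sleep 1' % (mem_sleep_mode, MEM_SLEEP_PATH))
    # Kick off the suspend in the background, mirroring POWER_STATE_S0IX / _S3.
    cmds.append('echo %s > %s &' % (power_state, POWER_STATE_PATH))
    return '; '.join(cmds)

# Example: the S0ix command on a DUT that exposes /sys/power/mem_sleep.
print(build_suspend_command('S0ix', has_mem_sleep=True))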
diff --git a/server/site_tests/firmware_Cr50ECReset/firmware_Cr50ECReset.py b/server/site_tests/firmware_Cr50ECReset/firmware_Cr50ECReset.py
index b770a8f..98f22ec 100644
--- a/server/site_tests/firmware_Cr50ECReset/firmware_Cr50ECReset.py
+++ b/server/site_tests/firmware_Cr50ECReset/firmware_Cr50ECReset.py
@@ -49,9 +49,13 @@
time.sleep(self.EC_SETTLE_TIME)
try:
self.ec.send_command_get_output('time', ['.*>'])
- except error.TestFail, e:
- logging.info(e)
- if 'Timeout waiting for response' in str(e):
+ except error.TestFail as e:
+ # TODO(b/149760070): To detect if EC is responsive,
+ # send_command_get_output() should define and raise a Timeout error.
+ msg = str(e)
+ logging.info(msg)
+ if ('Timeout waiting for response' in msg or
+ 'No data was sent from the pty' in msg):
return False
raise
else:
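This diff, like the firmware_Cr50DeferredECReset and firmware_Cr50OpenWhileAPOff changes nearby, ports the exception handling to Python 3 syntax and treats a second console message ('No data was sent from the pty') as an unresponsive EC, pending the dedicated timeout error mentioned in the TODO for b/149760070. A rough sketch of that message-matching pattern, with a stand-in TestFail class and send_command callable (both hypothetical):

import logging

CONSOLE_NO_RESPONSE_MESSAGES = (
    'Timeout waiting for response',
    'No data was sent from the pty',
)

class TestFail(Exception):
    """Stand-in for autotest's error.TestFail."""

def console_is_responsive(send_command):
    """Run a console command; return False when the failure looks like a timeout."""
    try:
        send_command('time')
    except TestFail as e:
        msg = str(e)
        logging.info(msg)
        if any(text in msg for text in CONSOLE_NO_RESPONSE_MESSAGES):
            return False
        raise
    return True

# Example: a send_command that succeeds makes the console look responsive.
print(console_is_responsive(lambda cmd: None))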
diff --git a/server/site_tests/firmware_Cr50OpenWhileAPOff/firmware_Cr50OpenWhileAPOff.py b/server/site_tests/firmware_Cr50OpenWhileAPOff/firmware_Cr50OpenWhileAPOff.py
index 2c5afe6..0d8842f 100644
--- a/server/site_tests/firmware_Cr50OpenWhileAPOff/firmware_Cr50OpenWhileAPOff.py
+++ b/server/site_tests/firmware_Cr50OpenWhileAPOff/firmware_Cr50OpenWhileAPOff.py
@@ -114,8 +114,10 @@
# Verify the cr50 console responds to commands.
try:
logging.info(self.cr50.get_ccdstate())
- except error.TestFail, e:
- if 'Timeout waiting for response' in e.message:
+ except error.TestFail as e:
+ msg = str(e)
+ if ('Timeout waiting for response' in msg or
+ 'No data was sent from the pty' in msg):
raise error.TestFail('Could not restore Cr50 console')
raise
diff --git a/server/site_tests/firmware_Cr50RMAOpen/firmware_Cr50RMAOpen.py b/server/site_tests/firmware_Cr50RMAOpen/firmware_Cr50RMAOpen.py
index 1c24c80..11d20fa 100644
--- a/server/site_tests/firmware_Cr50RMAOpen/firmware_Cr50RMAOpen.py
+++ b/server/site_tests/firmware_Cr50RMAOpen/firmware_Cr50RMAOpen.py
@@ -65,7 +65,7 @@
if not self.cr50.has_command('rma_auth'):
raise error.TestNAError('Cannot test on Cr50 without RMA support')
- if not self.cr50.using_servo_v4():
+ if not self.cr50._servo.dts_mode_is_valid():
raise error.TestNAError('This messes with ccd settings. Use flex '
'cable to run the test.')
diff --git a/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch001 b/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch001
index 1663e6e..95e60d4 100644
--- a/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch001
+++ b/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch001
@@ -25,11 +25,13 @@
channel=1,
n_capabilities=[hostap_config.HostapConfig.N_CAPABILITY_HT40_PLUS],
mode=hostap_config.HostapConfig.MODE_11N_PURE)
+ attenuation_increment = 4
+ final_attenuation = 100
job.run_test('network_WiFi_AttenuatedPerf',
tag=NAME.split('.')[1],
host=host,
raw_cmdline_args=args,
- additional_params=ap_config)
+ additional_params=(ap_config, attenuation_increment, final_attenuation))
parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch006 b/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch006
index b694b9c..6001511 100644
--- a/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch006
+++ b/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch006
@@ -25,11 +25,13 @@
channel=6,
n_capabilities=[hostap_config.HostapConfig.N_CAPABILITY_HT40_PLUS],
mode=hostap_config.HostapConfig.MODE_11N_PURE)
+ attenuation_increment = 4
+ final_attenuation = 100
job.run_test('network_WiFi_AttenuatedPerf',
tag=NAME.split('.')[1],
host=host,
raw_cmdline_args=args,
- additional_params=ap_config)
+ additional_params=(ap_config, attenuation_increment, final_attenuation))
parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch011 b/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch011
index 660e321..8e673d9 100644
--- a/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch011
+++ b/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch011
@@ -25,11 +25,13 @@
channel=11,
n_capabilities=[hostap_config.HostapConfig.N_CAPABILITY_HT40_MINUS],
mode=hostap_config.HostapConfig.MODE_11N_PURE)
+ attenuation_increment = 4
+ final_attenuation = 100
job.run_test('network_WiFi_AttenuatedPerf',
tag=NAME.split('.')[1],
host=host,
raw_cmdline_args=args,
- additional_params=ap_config)
+ additional_params=(ap_config, attenuation_increment, final_attenuation))
parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch044 b/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch044
index 94e80da..243834a 100644
--- a/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch044
+++ b/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch044
@@ -25,11 +25,13 @@
channel=44,
n_capabilities=[hostap_config.HostapConfig.N_CAPABILITY_HT40_PLUS],
mode=hostap_config.HostapConfig.MODE_11N_PURE)
+ attenuation_increment = 4
+ final_attenuation = 100
job.run_test('network_WiFi_AttenuatedPerf',
tag=NAME.split('.')[1],
host=host,
raw_cmdline_args=args,
- additional_params=ap_config)
+ additional_params=(ap_config, attenuation_increment, final_attenuation))
parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch153 b/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch153
index 4351117..bd389fc 100644
--- a/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch153
+++ b/server/site_tests/network_WiFi_AttenuatedPerf/control.ht40_ch153
@@ -25,11 +25,13 @@
channel=153,
n_capabilities=[hostap_config.HostapConfig.N_CAPABILITY_HT40_MINUS],
mode=hostap_config.HostapConfig.MODE_11N_PURE)
+ attenuation_increment = 4
+ final_attenuation = 100
job.run_test('network_WiFi_AttenuatedPerf',
tag=NAME.split('.')[1],
host=host,
raw_cmdline_args=args,
- additional_params=ap_config)
+ additional_params=(ap_config, attenuation_increment, final_attenuation))
parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_AttenuatedPerf/control.vht40_ch036 b/server/site_tests/network_WiFi_AttenuatedPerf/control.vht40_ch036
new file mode 100644
index 0000000..6cdb8e5
--- /dev/null
+++ b/server/site_tests/network_WiFi_AttenuatedPerf/control.vht40_ch036
@@ -0,0 +1,38 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'wiley, pstew, quiche'
+NAME = 'network_WiFi_AttenuatedPerf.vht40_ch036'
+ATTRIBUTES = "suite:wifi_atten_perf"
+TIME = 'SHORT'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'wificell'
+
+DOC = """
+This test uses netperf to measure the maximal receiving and transmitting
+throughput on a DUT with an open VHT40 802.11ac network across multiple
+attenuation levels.
+"""
+
+
+from autotest_lib.server.cros.network import hostap_config
+
+
+def run(machine):
+ host = hosts.create_host(machine)
+ ap_config = hostap_config.HostapConfig(
+ channel=36,
+ mode=hostap_config.HostapConfig.MODE_11AC_PURE,
+ n_capabilities=[hostap_config.HostapConfig.N_CAPABILITY_HT40],
+ vht_channel_width=hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_40)
+ attenuation_increment = 6
+ final_attenuation = 100
+ job.run_test('network_WiFi_AttenuatedPerf',
+ tag=NAME.split('.')[1],
+ host=host,
+ raw_cmdline_args=args,
+ additional_params=(ap_config, attenuation_increment, final_attenuation))
+
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_AttenuatedPerf/control.vht40_ch060 b/server/site_tests/network_WiFi_AttenuatedPerf/control.vht40_ch060
new file mode 100644
index 0000000..f7e3ec4
--- /dev/null
+++ b/server/site_tests/network_WiFi_AttenuatedPerf/control.vht40_ch060
@@ -0,0 +1,38 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'wiley, pstew, quiche'
+NAME = 'network_WiFi_AttenuatedPerf.vht40_ch060'
+ATTRIBUTES = "suite:wifi_atten_perf"
+TIME = 'SHORT'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'wificell'
+
+DOC = """
+This test uses netperf to measure the maximal receiving and transmitting
+throughput on a DUT with an open VHT40 802.11ac network across multiple
+attenuation levels.
+"""
+
+
+from autotest_lib.server.cros.network import hostap_config
+
+
+def run(machine):
+ host = hosts.create_host(machine)
+ ap_config = hostap_config.HostapConfig(
+ channel=60,
+ mode=hostap_config.HostapConfig.MODE_11AC_PURE,
+ n_capabilities=[hostap_config.HostapConfig.N_CAPABILITY_HT40],
+ vht_channel_width=hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_40)
+ attenuation_increment = 6
+ final_attenuation = 100
+ job.run_test('network_WiFi_AttenuatedPerf',
+ tag=NAME.split('.')[1],
+ host=host,
+ raw_cmdline_args=args,
+ additional_params=(ap_config, attenuation_increment, final_attenuation))
+
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_AttenuatedPerf/control.vht40_ch149 b/server/site_tests/network_WiFi_AttenuatedPerf/control.vht40_ch149
new file mode 100644
index 0000000..ee874d2
--- /dev/null
+++ b/server/site_tests/network_WiFi_AttenuatedPerf/control.vht40_ch149
@@ -0,0 +1,38 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'wiley, pstew, quiche'
+NAME = 'network_WiFi_AttenuatedPerf.vht40_ch149'
+ATTRIBUTES = "suite:wifi_atten_perf"
+TIME = 'SHORT'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'wificell'
+
+DOC = """
+This test uses netperf to measure the maximal receiving and transmitting
+throughput on a DUT with an open VHT40 802.11ac network across multiple
+attenuation levels.
+"""
+
+
+from autotest_lib.server.cros.network import hostap_config
+
+
+def run(machine):
+ host = hosts.create_host(machine)
+ ap_config = hostap_config.HostapConfig(
+ channel=149,
+ mode=hostap_config.HostapConfig.MODE_11AC_PURE,
+ n_capabilities=[hostap_config.HostapConfig.N_CAPABILITY_HT40],
+ vht_channel_width=hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_40)
+ attenuation_increment = 6
+ final_attenuation = 100
+ job.run_test('network_WiFi_AttenuatedPerf',
+ tag=NAME.split('.')[1],
+ host=host,
+ raw_cmdline_args=args,
+ additional_params=(ap_config, attenuation_increment, final_attenuation))
+
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_AttenuatedPerf/control.vht40_ch157 b/server/site_tests/network_WiFi_AttenuatedPerf/control.vht40_ch157
new file mode 100644
index 0000000..e45afcd
--- /dev/null
+++ b/server/site_tests/network_WiFi_AttenuatedPerf/control.vht40_ch157
@@ -0,0 +1,38 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'wiley, pstew, quiche'
+NAME = 'network_WiFi_AttenuatedPerf.vht40_ch157'
+ATTRIBUTES = "suite:wifi_atten_perf"
+TIME = 'SHORT'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'wificell'
+
+DOC = """
+This test uses netperf to measure the maximal receiving and transmitting
+throughput on a DUT with an open VHT40 802.11ac network across multiple
+attenuation levels.
+"""
+
+
+from autotest_lib.server.cros.network import hostap_config
+
+
+def run(machine):
+ host = hosts.create_host(machine)
+ ap_config = hostap_config.HostapConfig(
+ channel=157,
+ mode=hostap_config.HostapConfig.MODE_11AC_PURE,
+ n_capabilities=[hostap_config.HostapConfig.N_CAPABILITY_HT40],
+ vht_channel_width=hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_40)
+ attenuation_increment = 6
+ final_attenuation = 100
+ job.run_test('network_WiFi_AttenuatedPerf',
+ tag=NAME.split('.')[1],
+ host=host,
+ raw_cmdline_args=args,
+ additional_params=(ap_config, attenuation_increment, final_attenuation))
+
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_AttenuatedPerf/control.vht80_ch042 b/server/site_tests/network_WiFi_AttenuatedPerf/control.vht80_ch042
new file mode 100644
index 0000000..42ff259
--- /dev/null
+++ b/server/site_tests/network_WiFi_AttenuatedPerf/control.vht80_ch042
@@ -0,0 +1,42 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'arowa'
+NAME = 'network_WiFi_AttenuatedPerf.vht80_ch042'
+ATTRIBUTES = "suite:wifi_atten_perf"
+TIME = 'SHORT'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'wificell'
+
+DOC = """
+This test uses netperf to measure the maximal receiving and transmitting
+throughput on a DUT with an open VHT80 802.11ac network across multiple
+attenuation levels.
+"""
+
+
+from autotest_lib.server.cros.network import hostap_config
+
+
+def run(machine):
+ host = hosts.create_host(machine)
+ n_caps = [hostap_config.HostapConfig.N_CAPABILITY_HT40_PLUS]
+ ac_caps = [hostap_config.HostapConfig.AC_CAPABILITY_SHORT_GI_80]
+ ac_mode = hostap_config.HostapConfig.MODE_11AC_PURE
+ channel_width_80_mhz = hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_80
+ ap_config = hostap_config.HostapConfig(
+ channel=44,
+ mode=ac_mode,
+ n_capabilities=n_caps,
+ vht_channel_width=channel_width_80_mhz,
+ vht_center_channel=42,
+ ac_capabilities=ac_caps)
+ attenuation_increment = 6
+ final_attenuation = 100
+ job.run_test('network_WiFi_AttenuatedPerf', tag=NAME.split('.')[1],
+ host=host, raw_cmdline_args=args,
+ additional_params=(ap_config, attenuation_increment, final_attenuation))
+
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_AttenuatedPerf/control.vht80_ch155 b/server/site_tests/network_WiFi_AttenuatedPerf/control.vht80_ch155
new file mode 100644
index 0000000..c3c465a
--- /dev/null
+++ b/server/site_tests/network_WiFi_AttenuatedPerf/control.vht80_ch155
@@ -0,0 +1,42 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+AUTHOR = 'arowa'
+NAME = 'network_WiFi_AttenuatedPerf.vht80_ch155'
+ATTRIBUTES = "suite:wifi_atten_perf"
+TIME = 'SHORT'
+TEST_TYPE = 'Server'
+DEPENDENCIES = 'wificell'
+
+DOC = """
+This test uses netperf to measure the maximal receiving and transmitting
+throughput on a DUT with an open VHT80 802.11ac network across multiple
+attenuation levels.
+"""
+
+
+from autotest_lib.server.cros.network import hostap_config
+
+
+def run(machine):
+ host = hosts.create_host(machine)
+ n_caps = [hostap_config.HostapConfig.N_CAPABILITY_HT40_PLUS]
+ ac_caps = [hostap_config.HostapConfig.AC_CAPABILITY_SHORT_GI_80]
+ ac_mode = hostap_config.HostapConfig.MODE_11AC_PURE
+ channel_width_80_mhz = hostap_config.HostapConfig.VHT_CHANNEL_WIDTH_80
+ ap_config = hostap_config.HostapConfig(
+ channel=157,
+ mode=ac_mode,
+ n_capabilities=n_caps,
+ vht_channel_width=channel_width_80_mhz,
+ vht_center_channel=155,
+ ac_capabilities=ac_caps)
+ attenuation_increment = 6
+ final_attenuation = 100
+ job.run_test('network_WiFi_AttenuatedPerf', tag=NAME.split('.')[1],
+ host=host, raw_cmdline_args=args,
+ additional_params=(ap_config, attenuation_increment, final_attenuation))
+
+
+parallel_simple(run, machines)
diff --git a/server/site_tests/network_WiFi_AttenuatedPerf/network_WiFi_AttenuatedPerf.py b/server/site_tests/network_WiFi_AttenuatedPerf/network_WiFi_AttenuatedPerf.py
index 34e14db..1bfbd6f 100644
--- a/server/site_tests/network_WiFi_AttenuatedPerf/network_WiFi_AttenuatedPerf.py
+++ b/server/site_tests/network_WiFi_AttenuatedPerf/network_WiFi_AttenuatedPerf.py
@@ -37,9 +37,6 @@
netperf_runner.NetperfConfig.TEST_TYPE_UDP_MAERTS),
]
- ATTENUATION_STEP = 4
- FINAL_ATTENUATION = 100
-
TSV_OUTPUT_DIR = 'tsvs'
DataPoint = collections.namedtuple('DataPoint',
@@ -54,8 +51,10 @@
@param additional_params list of dicts describing router configs.
"""
- self._ap_config = additional_params
+ self._ap_config = additional_params[0]
self.series_note = None
+ self._attenuation_increment = additional_params[1]
+ self._final_attenuation = additional_params[2]
if self.CMDLINE_SERIES_NOTE in commandline_args:
self.series_note = commandline_args[self.CMDLINE_SERIES_NOTE]
@@ -81,8 +80,8 @@
ignore_failures=True)
session.warmup_stations()
start_atten = self.context.attenuator.get_minimal_total_attenuation()
- for atten in range(start_atten, self.FINAL_ATTENUATION,
- self.ATTENUATION_STEP):
+ for atten in range(start_atten, self._final_attenuation,
+ self._attenuation_increment):
atten_tag = 'atten%03d' % atten
self.context.attenuator.set_total_attenuation(
atten, self._ap_config.frequency)
@@ -147,7 +146,8 @@
if max_atten is None:
raise error.TestFail('Did not succeed at any atten level')
- logging.info('Reached attenuation of: %d dB (signal %d)' % max_atten)
+ logging.info('Reached attenuation of: %d dB (signal %d)',
+ max_atten[0], max_atten[1])
self.write_perf_keyval({'ch%03d_max_atten' % self._ap_config.channel:
max_atten[0]})
self.write_perf_keyval({'ch%03d_min_signal' % self._ap_config.channel:
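The test above now reads its sweep parameters from additional_params instead of the removed ATTENUATION_STEP and FINAL_ATTENUATION class constants, so each control file chooses its own increment (4 for the ht40 controls, 6 for the new vht controls) and final attenuation. A small sketch of the resulting calling convention; the atten_levels helper and the literal start value of 52 are illustrative only.

def atten_levels(start_atten, final_attenuation, attenuation_increment):
    """Yield the attenuation levels visited by the sweep loop above."""
    for atten in range(start_atten, final_attenuation, attenuation_increment):
        yield atten

# Control files pass (ap_config, attenuation_increment, final_attenuation).
additional_params = (None, 4, 100)  # None stands in for the HostapConfig.
_, increment, final = additional_params
print(list(atten_levels(52, final, increment))[:5])  # [52, 56, 60, 64, 68]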
diff --git a/server/site_tests/platform_ExternalUSBBootStress/control.50 b/server/site_tests/platform_ExternalUSBBootStress/control.50
index 1360560..673ea7a 100644
--- a/server/site_tests/platform_ExternalUSBBootStress/control.50
+++ b/server/site_tests/platform_ExternalUSBBootStress/control.50
@@ -14,7 +14,7 @@
TEST_TYPE = "server"
# Stop running the test due to crbug.com/654478
# ATTRIBUTES = "suite:usb_detect_stress"
-# DEPENDENCIES = "servo, usb_detect"
+# DEPENDENCIES = "servo"
DOC = """
diff --git a/server/site_tests/platform_ExternalUsbPeripherals/control.crashes b/server/site_tests/platform_ExternalUsbPeripherals/control.crashes
index 75fdcd9..1a7874f 100644
--- a/server/site_tests/platform_ExternalUsbPeripherals/control.crashes
+++ b/server/site_tests/platform_ExternalUsbPeripherals/control.crashes
@@ -13,7 +13,7 @@
TEST_CLASS = "platform"
TEST_TYPE = "server"
#ATTRIBUTES = "suite:usb_detect"
-DEPENDENCIES = "servo, usb_detect"
+DEPENDENCIES = "servo"
DOC = """
This test uses servo to connect/disconnect servo USB hub before and
diff --git a/server/site_tests/platform_ExternalUsbPeripherals/control.detect b/server/site_tests/platform_ExternalUsbPeripherals/control.detect
index 28382ed..1815e9d 100644
--- a/server/site_tests/platform_ExternalUsbPeripherals/control.detect
+++ b/server/site_tests/platform_ExternalUsbPeripherals/control.detect
@@ -13,7 +13,7 @@
TEST_CLASS = "platform"
TEST_TYPE = "server"
ATTRIBUTES = "suite:usb_detect_stress"
-DEPENDENCIES = "servo, usb_detect"
+DEPENDENCIES = "servo"
DOC = """
This test uses servo to connect/disconnect servo USB hub before and
diff --git a/server/site_tests/platform_ExternalUsbPeripherals/control.detect.crash_check_short b/server/site_tests/platform_ExternalUsbPeripherals/control.detect.crash_check_short
index 3ab886a..6a5c0ce 100644
--- a/server/site_tests/platform_ExternalUsbPeripherals/control.detect.crash_check_short
+++ b/server/site_tests/platform_ExternalUsbPeripherals/control.detect.crash_check_short
@@ -13,7 +13,7 @@
TEST_CLASS = "platform"
TEST_TYPE = "server"
ATTRIBUTES = "suite:usb_detect"
-DEPENDENCIES = "servo, usb_detect"
+DEPENDENCIES = "servo"
DOC = """
This test uses servo to connect/disconnect servo USB hub before and
diff --git a/server/site_tests/platform_ExternalUsbPeripherals/control.detect.lid_close_open_short b/server/site_tests/platform_ExternalUsbPeripherals/control.detect.lid_close_open_short
index 422b1cf..b9f427c 100644
--- a/server/site_tests/platform_ExternalUsbPeripherals/control.detect.lid_close_open_short
+++ b/server/site_tests/platform_ExternalUsbPeripherals/control.detect.lid_close_open_short
@@ -13,7 +13,7 @@
TEST_CLASS = "platform"
TEST_TYPE = "server"
ATTRIBUTES = "suite:usb_detect"
-DEPENDENCIES = "servo, usb_detect, use_lid"
+DEPENDENCIES = "servo, use_lid"
JOB_RETRIES = 1
DOC = """
This test uses servo to connect/disconnect servo USB hub before and
diff --git a/server/site_tests/platform_ExternalUsbPeripherals/control.detect.login_unplug_closelid_openlid_plug b/server/site_tests/platform_ExternalUsbPeripherals/control.detect.login_unplug_closelid_openlid_plug
index b00068b..e8666d7 100644
--- a/server/site_tests/platform_ExternalUsbPeripherals/control.detect.login_unplug_closelid_openlid_plug
+++ b/server/site_tests/platform_ExternalUsbPeripherals/control.detect.login_unplug_closelid_openlid_plug
@@ -13,7 +13,7 @@
TEST_CLASS = "platform"
TEST_TYPE = "server"
ATTRIBUTES = "suite:usb_detect"
-DEPENDENCIES = "servo, usb_detect, use_lid"
+DEPENDENCIES = "servo, use_lid"
JOB_RETRIES = 2
DOC = """
diff --git a/server/site_tests/platform_ExternalUsbPeripherals/control.detect.long_cycle b/server/site_tests/platform_ExternalUsbPeripherals/control.detect.long_cycle
index cfef39e..8519d34 100644
--- a/server/site_tests/platform_ExternalUsbPeripherals/control.detect.long_cycle
+++ b/server/site_tests/platform_ExternalUsbPeripherals/control.detect.long_cycle
@@ -13,7 +13,7 @@
TEST_CLASS = "platform"
TEST_TYPE = "server"
ATTRIBUTES = "suite:usb_detect"
-DEPENDENCIES = "servo, usb_detect"
+DEPENDENCIES = "servo"
DOC = """
This test uses servo to connect/disconnect servo USB hub before and
diff --git a/server/site_tests/platform_ExternalUsbPeripherals/control.detect.unplug_login_plug b/server/site_tests/platform_ExternalUsbPeripherals/control.detect.unplug_login_plug
index 678cb2e..4013166 100644
--- a/server/site_tests/platform_ExternalUsbPeripherals/control.detect.unplug_login_plug
+++ b/server/site_tests/platform_ExternalUsbPeripherals/control.detect.unplug_login_plug
@@ -13,7 +13,7 @@
TEST_CLASS = "platform"
TEST_TYPE = "server"
ATTRIBUTES = "suite:usb_detect"
-DEPENDENCIES = "servo, usb_detect"
+DEPENDENCIES = "servo"
DOC = """
This test uses servo to connect/disconnect servo USB hub before and
diff --git a/server/site_tests/platform_GCC/boards/chromeos-machine.exp b/server/site_tests/platform_GCC/boards/chromeos-machine.exp
deleted file mode 100644
index 572aaea..0000000
--- a/server/site_tests/platform_GCC/boards/chromeos-machine.exp
+++ /dev/null
@@ -1,10 +0,0 @@
-load_base_board_description "unix"
-
-# Set hostname and username. # Make sure SSH keys are set up prior to run.
-set_board_info hostname $env(DEJAGNU_HOSTNAME)
-set_board_info username root
-
-set_board_info shell_prompt "dejagnu>"
-set_board_info rsh_prog "$env(DEJAGNU_SCRIPTS)/dejagnu_ssh"
-set_board_info rcp_prog "$env(DEJAGNU_SCRIPTS)/dejagnu_scp"
-
diff --git a/server/site_tests/platform_GCC/control b/server/site_tests/platform_GCC/control
deleted file mode 100644
index 124f331..0000000
--- a/server/site_tests/platform_GCC/control
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2009 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-AUTHOR = "Chrome OS Team"
-NAME = "GCC"
-TIME = "LONG"
-TEST_CATEGORY = "Functional"
-TEST_CLASS = "platform"
-TEST_TYPE = "server"
-
-DOC = """
-This test runs the GCC test suite which uses the DejaGNU unittest framework. This test requires that DejaGNU be installed and that setup_board is run with FEATURES=noclean so that the toolchain files are available for testing.
-"""
-
-def run_server_tests(machine):
- client = hosts.create_host(machine)
- job.run_test("platform_GCC", host=client, args=args)
-
-job.parallel_on_machines(run_server_tests, machines)
diff --git a/server/site_tests/platform_GCC/dejagnu_cleanup_remote b/server/site_tests/platform_GCC/dejagnu_cleanup_remote
deleted file mode 100755
index 10644d0..0000000
--- a/server/site_tests/platform_GCC/dejagnu_cleanup_remote
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-. "$(dirname $(readlink -f $0))/dejagnu_remote.sh"
-dejagnu_cleanup_remote
diff --git a/server/site_tests/platform_GCC/dejagnu_init_remote b/server/site_tests/platform_GCC/dejagnu_init_remote
deleted file mode 100755
index 8d3b3ca..0000000
--- a/server/site_tests/platform_GCC/dejagnu_init_remote
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-. "$(dirname $(readlink -f $0))/dejagnu_remote.sh"
-dejagnu_init_remote "$1"
diff --git a/server/site_tests/platform_GCC/dejagnu_remote.sh b/server/site_tests/platform_GCC/dejagnu_remote.sh
deleted file mode 100644
index 4f90e6a..0000000
--- a/server/site_tests/platform_GCC/dejagnu_remote.sh
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-find_common_sh() {
- local common_paths=(/usr/lib/crosutils $(dirname "$0"))
- local path
-
- SCRIPT_ROOT=
- for path in "${common_paths[@]}"; do
- local common="${path}/common.sh"
- if ([ -r "${common}" ] && . "${common}" && [ -d "${SCRIPTS_DIR}" ]); then
- SCRIPT_ROOT="${path}"
- break
- fi
- done
-}
-find_common_sh
-. "${SCRIPT_ROOT}/common.sh" || ! echo "Unable to load common.sh" || exit 1
-
-DEFAULT_PRIVATE_KEY="${GCLIENT_ROOT}/src/scripts/mod_for_test_scripts/\
-ssh_keys/testing_rsa"
-
-TMP="/tmp/dejagnu-tests/"
-TMP_PRIVATE_KEY=${TMP}/private_key
-TMP_KNOWN_HOSTS=${TMP}/known_hosts
-CONTROL_PATH="${TMP}/%r@%h:%p"
-SSH_ARGS="-p22 -o StrictHostKeyChecking=no \
- -o UserKnownHostsFile=${TMP_KNOWN_HOSTS} -i ${TMP_PRIVATE_KEY}"
-
-dejagnu_init_remote() {
- mkdir -p ${TMP}
- cp ${DEFAULT_PRIVATE_KEY} ${TMP_PRIVATE_KEY}
- chmod 0400 ${TMP_PRIVATE_KEY}
- PS1=. TERM=linux ssh ${SSH_ARGS} -t -t -M -S "${CONTROL_PATH}" root@$1 \
- >/dev/null 2>&1 &
- echo $! > "${TMP}/master-pid"
- dejagnu_ssh root@$1 -- "echo Connection OK."
-}
-
-dejagnu_cleanup_remote() {
- set +e
- kill "$(cat ${TMP}/master-pid)"
- set -e
- rm -rf "${TMP}"
-}
-
-dejagnu_ssh() {
- COMMAND="ssh ${SSH_ARGS} -t -o ControlPath=${CONTROL_PATH} $@"
- # TODO(raymes): Remove this timeout hack once our tests run without
- # infinite loops.
- TIMEOUT_COMMAND="$(echo "$COMMAND" | sed "s/sh -c '/sh -c 'timeout 5 /g")"
- $TIMEOUT_COMMAND
-}
-
-dejagnu_scp() {
- scp ${SSH_ARGS} -o ControlPath="${CONTROL_PATH}" $@
-}
diff --git a/server/site_tests/platform_GCC/dejagnu_scp b/server/site_tests/platform_GCC/dejagnu_scp
deleted file mode 100755
index faf661e..0000000
--- a/server/site_tests/platform_GCC/dejagnu_scp
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-. "$(dirname $(readlink -f $0))/dejagnu_remote.sh"
-dejagnu_scp $@
diff --git a/server/site_tests/platform_GCC/dejagnu_ssh b/server/site_tests/platform_GCC/dejagnu_ssh
deleted file mode 100755
index 79fc0b8..0000000
--- a/server/site_tests/platform_GCC/dejagnu_ssh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-. "$(dirname $(readlink -f $0))/dejagnu_remote.sh"
-dejagnu_ssh $@
diff --git a/server/site_tests/platform_GCC/platform_GCC.py b/server/site_tests/platform_GCC/platform_GCC.py
deleted file mode 100644
index 0fc22d7..0000000
--- a/server/site_tests/platform_GCC/platform_GCC.py
+++ /dev/null
@@ -1,128 +0,0 @@
-# Copyright (c) 2009 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import glob, logging, os, shutil
-from autotest_lib.client.common_lib import error
-from autotest_lib.server import test, utils
-from optparse import OptionParser
-
-class platform_GCC(test.test):
- """Class for running the GCC dejagnu tests."""
- version = 1
- results = {}
-
- TEST_STATUSES = ('PASS', 'FAIL', 'UNRESOLVED', 'UNTESTED', 'UNSUPPORTED',
- 'XFAIL', 'KFAIL', 'XPASS', 'KPASS')
- TARBALL = '/usr/local/dejagnu/gcc/tests.tar.gz'
-
- def parse_log(self, log):
- results = {}
- counts = {}
- log_file = open(log, 'rb')
- for line in log_file:
- if line.startswith(self.TEST_STATUSES):
- result, testname = line.split(': ', 1)
- testname = testname.strip()
- if testname in results:
- counts[testname] += 1
- unique_testname = '%s (%d)' % (testname, counts[testname])
- else:
- counts[testname] = 1
- unique_testname = testname
- results[unique_testname] = result
- log_file.close()
- return results
-
-
- def compare_logs(self, baseline, new):
- baseline_results = self.parse_log(baseline)
- logging.info('%d results parsed in baseline (%s).' %
- (len(baseline_results), baseline))
- new_results = self.parse_log(new)
- logging.info('%d results parsed in new log (%s).' %
- (len(new_results), new))
-
- differences = []
- for testname in new_results.keys():
- if testname not in baseline_results:
- differences.append((testname, 'NOTEXECUTED',
- new_results[testname]))
- elif new_results[testname] != baseline_results[testname]:
- differences.append((testname, baseline_results[testname],
- new_results[testname]))
- for testname in baseline_results.keys():
- if testname not in new_results:
- differences.append((testname, baseline_results[testname],
- 'NOTEXECUTED'))
- return differences
-
-
- def run_once(self, host=None, args=[]):
- self.client = host
-
- parser = OptionParser()
- parser.add_option('--gcc_dir',
- dest='gcc_dir',
- default='/var/tmp/portage/cross-*/gcc-*/work/gcc-*build*',
- help='Path to the gcc build directory.')
- parser.add_option('--test_flags',
- dest='test_flags',
- default='',
- help='Options to pass to dejagnu.')
-
- options, args = parser.parse_args(args)
-
- utils.system('%s %s' %
- (os.path.join(self.bindir, 'dejagnu_init_remote'),
- self.client.ip))
-
- gcc_dirs = glob.glob(options.gcc_dir)
- if len(gcc_dirs) == 0:
- # If there is no directory present, try untarring the tarball
- # installed by the gcc package.
- logging.info('No gcc directory found, attempting to untar from %s'
- % self.TARBALL)
- os.chdir('/')
- os.system('tar -xzf %s' % self.TARBALL)
- gcc_dirs = glob.glob(options.gcc_dir)
- if len(gcc_dirs) == 0:
- raise error.TestFail('No gcc directory to test was found')
-
- gcc_dir = gcc_dirs[0]
-
- logging.info('Testing gcc in the following directory: %s' % gcc_dir)
- exp_file = os.path.join(self.bindir, 'site.exp')
- client_hostname = str(self.client.ip)
- test_flags = options.test_flags
- test_command = ('cd %s; DEJAGNU="%s" DEJAGNU_SCRIPTS=%s '
- 'DEJAGNU_HOSTNAME=%s make '
- 'RUNTESTFLAGS="%s" check-gcc' % (gcc_dir, exp_file,
- self.bindir, client_hostname, test_flags))
- utils.system(test_command)
-
- error_messages = []
- for log in ('gcc', 'g++'):
- log_from = os.path.join(gcc_dir, 'gcc/testsuite/%s/%s.log' %
- (log, log))
- log_to = os.path.join(self.resultsdir, '%s.log' % (log))
- shutil.copy(log_from, log_to)
-
- baseline = os.path.join(self.bindir, '%s.log' % (log))
-
- differences = self.compare_logs(baseline, log_to)
- for difference in differences:
- error_string = ('(%s) "%s" Expected: "%s" Actual: "%s"' %
- (log_to, difference[0],
- difference[1], difference[2]))
- error_messages.append(error_string)
- keyname = log.replace('+', 'p')
- self.results['%s_differences' % keyname] = len(differences)
-
- self.write_perf_keyval(self.results)
-
- if len(error_messages) != 0:
- raise error.TestFail('\n'.join(error_messages))
-
- def cleanup(self):
- utils.system(os.path.join(self.bindir, 'dejagnu_cleanup_remote'))
diff --git a/server/site_tests/platform_GCC/site.exp b/server/site_tests/platform_GCC/site.exp
deleted file mode 100644
index beaa958..0000000
--- a/server/site_tests/platform_GCC/site.exp
+++ /dev/null
@@ -1 +0,0 @@
-set target_list "chromeos-machine"
diff --git a/server/site_tests/platform_SuspendResumeTiming/control b/server/site_tests/platform_SuspendResumeTiming/control
index 37eff7c..9162df1 100644
--- a/server/site_tests/platform_SuspendResumeTiming/control
+++ b/server/site_tests/platform_SuspendResumeTiming/control
@@ -13,7 +13,7 @@
TEST_CLASS = "platform"
TEST_TYPE = "server"
ATTRIBUTES = "suite:usb_detect"
-DEPENDENCIES = "servo, usb_detect"
+DEPENDENCIES = "servo"
DOC = """
This test measures the time to suspend and resume
@@ -29,4 +29,4 @@
job.run_test("platform_SuspendResumeTiming", host=host,
plug_usb=False, disable_sysinfo=True)
-parallel_simple(run, machines)
\ No newline at end of file
+parallel_simple(run, machines)
diff --git a/server/site_tests/platform_SuspendResumeTiming/control.usb_plugged b/server/site_tests/platform_SuspendResumeTiming/control.usb_plugged
index 2b77f3c..f401a24 100644
--- a/server/site_tests/platform_SuspendResumeTiming/control.usb_plugged
+++ b/server/site_tests/platform_SuspendResumeTiming/control.usb_plugged
@@ -13,7 +13,7 @@
TEST_CLASS = "platform"
TEST_TYPE = "server"
ATTRIBUTES = "suite:usb_detect"
-DEPENDENCIES = "servo, usb_detect"
+DEPENDENCIES = "servo"
DOC = """
This test measures the time to suspend and resume
@@ -29,4 +29,4 @@
job.run_test("platform_SuspendResumeTiming", host=host,
plug_usb=True, disable_sysinfo=True)
-parallel_simple(run, machines)
\ No newline at end of file
+parallel_simple(run, machines)
diff --git a/server/site_tests/provision_Cr50Update/provision_Cr50Update.py b/server/site_tests/provision_Cr50Update/provision_Cr50Update.py
index fb5f372..b33f6b0 100644
--- a/server/site_tests/provision_Cr50Update/provision_Cr50Update.py
+++ b/server/site_tests/provision_Cr50Update/provision_Cr50Update.py
@@ -28,7 +28,6 @@
"""
version = 1
- MP_FLAGS = 0x7f80
def initialize(self, host, cmdline_args, full_args, value='',
release_path='', force=False):
@@ -58,7 +57,8 @@
self.local_path, self.image_ver = image_info
self.image_rw = self.image_ver[1]
self.image_bid = self.image_ver[2]
- self.chip_bid = self.get_chip_bid_from_image_bid()
+ self.chip_bid = cr50_utils.GetChipBIDFromImageBID(
+ self.image_bid, self.get_device_brand())
def init_local_image(self, release_path):
@@ -75,33 +75,6 @@
return release_path, ver
- def get_chip_bid_from_image_bid(self):
- """Calculate a chip bid that will work with the image bid.
-
- Returns:
- A tuple of integers (bid type, ~bid type, bid flags)
- """
- brand = self.get_device_brand()
- image_bid_tuple = cr50_utils.GetBoardIdInfoTuple(self.image_bid)
- # GetBoardIdInfoTuple returns None if the image isn't board id locked.
- # Generate a Tuple of all 0s the rest of the function can use.
- if not image_bid_tuple:
- image_bid_tuple = (0, 0, 0)
-
- image_bid, image_mask, image_flags = image_bid_tuple
- if image_mask:
- new_brand = cr50_utils.GetSymbolicBoardId(image_bid)
- else:
- new_brand = brand
- new_flags = image_flags or self.MP_FLAGS
- bid_type = cr50_utils.GetIntBoardId(new_brand)
- # If the board id type is erased, type_inv should also be unset.
- if bid_type == cr50_utils.ERASED_BID_INT:
- return (cr50_utils.ERASED_BID_INT, cr50_utils.ERASED_BID_INT,
- new_flags)
- return bid_type, 0xffffffff & ~bid_type, new_flags
-
-
def run_once(self, force=False):
"""The method called by the control file to start the update."""
# TODO(mruthven): remove once the test is successfully scheduled.
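
The local get_chip_bid_from_image_bid helper is replaced by a call to cr50_utils.GetChipBIDFromImageBID, which now takes the image board ID and the device brand directly. For reference, the deleted logic restated as a standalone sketch; the centralized cr50_utils implementation may not match this exactly, and the import path is assumed:

from autotest_lib.server.cros.servo import cr50_utils  # import path assumed

MP_FLAGS = 0x7f80  # default flags the deleted helper fell back to

def chip_bid_from_image_bid(image_bid, brand):
    """Derive a (bid type, ~bid type, flags) tuple compatible with the image."""
    # GetBoardIdInfoTuple returns None when the image is not board-id locked.
    bid, mask, flags = cr50_utils.GetBoardIdInfoTuple(image_bid) or (0, 0, 0)
    new_brand = cr50_utils.GetSymbolicBoardId(bid) if mask else brand
    new_flags = flags or MP_FLAGS
    bid_type = cr50_utils.GetIntBoardId(new_brand)
    if bid_type == cr50_utils.ERASED_BID_INT:
        # If the board id type is erased, the inverted type is unset too.
        return (cr50_utils.ERASED_BID_INT, cr50_utils.ERASED_BID_INT, new_flags)
    return bid_type, 0xffffffff & ~bid_type, new_flags
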
diff --git a/site_utils/deployment/install.py b/site_utils/deployment/install.py
index 9276708..5c494e6 100644
--- a/site_utils/deployment/install.py
+++ b/site_utils/deployment/install.py
@@ -61,8 +61,6 @@
import time
import traceback
-from chromite.lib import gs
-
import common
from autotest_lib.client.common_lib import error
from autotest_lib.client.common_lib import host_states
@@ -78,7 +76,6 @@
from autotest_lib.server.hosts import servo_host
from autotest_lib.site_utils.deployment import cmdvalidate
from autotest_lib.site_utils.deployment.prepare import dut as preparedut
-from autotest_lib.site_utils.stable_images import build_data
from autotest_lib.utils import labellib
@@ -142,8 +139,7 @@
@param dirpath Path to directory containing the logs.
@param gspath Path to GS bucket.
"""
- ctx = gs.GSContext()
- ctx.Copy(dirpath, gspath, recursive=True)
+ utils.run(['gsutil', 'cp', '-r', '--', dirpath, gspath])
def _get_omaha_build(board):
@@ -160,8 +156,8 @@
R##-####.#.#. Will return `None` if no Beta channel
entry is found.
"""
- ctx = gs.GSContext()
- omaha_status = json.loads(ctx.Cat(_OMAHA_STATUS))
+ ret = utils.run(['gsutil', 'cat', '--', _OMAHA_STATUS])
+ omaha_status = json.loads(ret.stdout)
omaha_board = board.replace('_', '-')
for e in omaha_status['omaha_data']:
if (e['channel'] == 'beta' and
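
install.py now shells out to the gsutil CLI through utils.run instead of importing chromite's gs.GSContext, which drops the chromite dependency from the deployment path. A minimal standalone sketch of the same two operations using the standard library's subprocess in place of autotest's utils.run (Python 3.7+; paths are placeholders):

import json
import subprocess

def copy_to_gs(dirpath, gspath):
    # Equivalent of ctx.Copy(dirpath, gspath, recursive=True).
    subprocess.run(['gsutil', 'cp', '-r', '--', dirpath, gspath], check=True)

def load_omaha_status(omaha_status_uri):
    # Equivalent of json.loads(ctx.Cat(_OMAHA_STATUS)).
    result = subprocess.run(['gsutil', 'cat', '--', omaha_status_uri],
                            check=True, capture_output=True, text=True)
    return json.loads(result.stdout)
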
diff --git a/site_utils/deployment/prepare/main.py b/site_utils/deployment/prepare/main.py
index 836101d..a5cb687 100755
--- a/site_utils/deployment/prepare/main.py
+++ b/site_utils/deployment/prepare/main.py
@@ -94,7 +94,7 @@
if 'update-label' in opts.actions:
try:
- host.labels.update_labels(host)
+ host.labels.update_labels(host, task_name='deploy')
except Exception as err:
logging.error("fail to update label: %s", err)
return RETURN_CODES.UPDATE_LABEL_FAILURE

diff --git a/site_utils/dump_suite_report.py b/site_utils/dump_suite_report.py
index 7d470d8..1e9b30f 100755
--- a/site_utils/dump_suite_report.py
+++ b/site_utils/dump_suite_report.py
@@ -8,17 +8,17 @@
from __future__ import print_function
import common
+import argparse
import logging
import sys
from autotest_lib.server.cros.dynamic_suite import frontend_wrappers
from autotest_lib.server.lib import suite_report
-from chromite.lib import commandline
from chromite.lib import ts_mon_config
def GetParser():
"""Creates the argparse parser."""
- parser = commandline.ArgumentParser(description=__doc__)
+ parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('job_ids', type=int, nargs='+',
help='Suite job ids to dump')
# As a provision suite may exit before all its provision jobs finish, the
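
dump_suite_report.py swaps chromite's commandline wrapper for the standard-library argparse. A self-contained sketch of the resulting parser, limited to the job_ids argument shown in this hunk (the docstring here is illustrative; the real script passes its own module docstring as the description):

"""Illustrative module docstring used as the CLI description."""

import argparse

def get_parser():
    """Build the suite-report argument parser with plain argparse."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('job_ids', type=int, nargs='+',
                        help='Suite job ids to dump')
    return parser

# Example: get_parser().parse_args(['1234', '5678']).job_ids == [1234, 5678]
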
diff --git a/site_utils/test_that.py b/site_utils/test_that.py
index 616a329..07993d6 100755
--- a/site_utils/test_that.py
+++ b/site_utils/test_that.py
@@ -23,15 +23,6 @@
from autotest_lib.site_utils import test_runner_utils
-try:
- from chromite.lib import cros_build_lib
-except ImportError:
- print 'Unable to import chromite.'
- print 'This script must be either:'
- print ' - Be run in the chroot.'
- print ' - (not yet supported) be run after running '
- print ' ../utils/build_externals.py'
-
_QUICKMERGE_SCRIPTNAME = '/mnt/host/source/chromite/bin/autotest_quickmerge'
@@ -112,11 +103,10 @@
'and the lab server code rather than local '
'changes.')
test_runner_utils.add_common_args(parser)
- default_board = cros_build_lib.GetDefaultBoard()
- parser.add_argument('-b', '--board', metavar='BOARD', default=default_board,
+ parser.add_argument('-b', '--board', metavar='BOARD',
action='store',
- help='Board for which the test will run. Default: %s' %
- (default_board or 'Not configured'))
+ help='Board for which the test will run. '
+ 'Default: %(default)s')
parser.add_argument('-m', '--model', metavar='MODEL', default='',
help='Specific model the test will run against. '
'Matches the model:FAKE_MODEL label for the host.')
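
With the cros_build_lib.GetDefaultBoard() lookup removed, --board no longer has a repo-derived default, and the help text now relies on argparse's built-in '%(default)s' interpolation, which substitutes the argument's current default (None unless one is set) when help is rendered. A tiny sketch demonstrating the interpolation, with illustrative names:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-b', '--board', metavar='BOARD',
                    help='Board for which the test will run. '
                         'Default: %(default)s')
parser.print_help()  # the --board line renders as "... Default: None"
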
diff --git a/utils/loadtest.py b/utils/loadtest.py
index 3dd00a5..3d3e2d3 100755
--- a/utils/loadtest.py
+++ b/utils/loadtest.py
@@ -38,6 +38,7 @@
atest host mod -u DUT1 DUT2
"""
+import argparse
import collections
import datetime
import json
@@ -52,7 +53,6 @@
import common
from autotest_lib.client.common_lib import time_utils
from autotest_lib.client.common_lib.cros import dev_server
-from chromite.lib import commandline
from chromite.lib import locking
from chromite.lib import parallel
@@ -69,7 +69,7 @@
def get_parser():
"""Creates the argparse parser."""
- parser = commandline.ArgumentParser(description=__doc__)
+ parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('server', type=str, action='store',
help='Devserver to load test.')
parser.add_argument('config', type=str, action='store',