blob: 186e0f1c679b0ecb58db70bfa6b915ae7902afac [file] [log] [blame]
Ilja H. Friedelbee84a72016-09-28 15:57:06 -07001# Copyright 2016 The Chromium OS Authors. All rights reserved.
2# Use of this source code is governed by a BSD-style license that can be
3# found in the LICENSE file.
4
5# repohooks/pre-upload.py currently does not run pylint. But for developers who
6# want to check their code manually we disable several harmless pylint warnings
7# which just distract from more serious remaining issues.
8#
9# The instance variables _host and _install_paths are not defined in __init__().
10# pylint: disable=attribute-defined-outside-init
11#
12# Many short variable names don't follow the naming convention.
13# pylint: disable=invalid-name
14#
15# _parse_result() and _dir_size() don't access self and could be functions.
16# pylint: disable=no-self-use
17#
18# _ChromeLogin and _TradefedLogCollector have no public methods.
19# pylint: disable=too-few-public-methods
20
21import contextlib
22import errno
David Haddockb9a362b2016-10-28 16:19:12 -070023import glob
Ilja H. Friedelbee84a72016-09-28 15:57:06 -070024import hashlib
Ilja H. Friedel46863772017-01-25 00:53:44 -080025import lockfile
Ilja H. Friedelbee84a72016-09-28 15:57:06 -070026import logging
27import os
28import pipes
29import random
30import re
31import shutil
32import stat
33import tempfile
34import urlparse
35
36from autotest_lib.client.bin import utils as client_utils
37from autotest_lib.client.common_lib import error
38from autotest_lib.client.common_lib.cros import dev_server
39from autotest_lib.server import afe_utils
40from autotest_lib.server import autotest
41from autotest_lib.server import test
42from autotest_lib.server import utils
43from autotest_lib.site_utils import lxc
44
Ilja H. Friedelbee84a72016-09-28 15:57:06 -070045
# Google Storage location of the prebuilt SDK tools (e.g. aapt).
_SDK_TOOLS_DIR = ('gs://chromeos-arc-images/builds/'
                  'git_mnc-dr-arc-dev-linux-static_sdk_tools/3554341')
_SDK_TOOLS_FILES = ['aapt']
# To stabilize adb behavior, we use dynamically linked adb.
_ADB_DIR = ('gs://chromeos-arc-images/builds/'
            'git_mnc-dr-arc-dev-linux-cheets_arm-user/3554341')
# TODO(ihf): Make this the path below as it seems to work locally.
# 'git_mnc-dr-arc-dev-linux-static_sdk_tools/3554341')
_ADB_FILES = ['adb']

# How often/how long to poll for adb to become responsive.
_ADB_POLLING_INTERVAL_SECONDS = 1
_ADB_READY_TIMEOUT_SECONDS = 60
# Path inside the Android container where adb public keys are installed.
_ANDROID_ADB_KEYS_PATH = '/data/misc/adb/adb_keys'

# How often/how long to poll for the ARC container to finish booting.
_ARC_POLLING_INTERVAL_SECONDS = 1
_ARC_READY_TIMEOUT_SECONDS = 60

# Prefix for the per-job temporary install directory (see initialize()).
_TRADEFED_PREFIX = 'autotest-tradefed-install_'
# Cache location for local developer runs vs. runs inside an lxc container.
_TRADEFED_CACHE_LOCAL = '/tmp/autotest-tradefed-cache'
_TRADEFED_CACHE_CONTAINER = '/usr/local/autotest/results/shared/cache'
_TRADEFED_CACHE_CONTAINER_LOCK = '/usr/local/autotest/results/shared/lock'

# According to dshi a drone has 500GB of disk space. It is ok for now to use
# 10GB of disk space, as no more than 10 tests should run in parallel.
# TODO(ihf): Investigate tighter cache size.
_TRADEFED_CACHE_MAX_SIZE = (10 * 1024 * 1024 * 1024)
72
73
class _ChromeLogin(object):
    """Context manager to handle Chrome login state.

    Entering logs into Chrome on the DUT (which also boots Android);
    exiting reboots the DUT to wipe out all login state.
    """

    def __init__(self, host):
        # DUT host object used for the client test and the final reboot.
        self._host = host

    def __enter__(self):
        """Logs in to the Chrome."""
        logging.info('Ensure Android is running...')
        autotest.Autotest(self._host).run_test('cheets_CTSHelper',
                                               check_client_result=True)

    def __exit__(self, exc_type, exc_value, traceback):
        """On exit, to wipe out all the login state, reboot the machine.

        @param exc_type: Exception type if an exception is raised from the
                         with-block.
        @param exc_value: Exception instance if an exception is raised from
                          the with-block.
        @param traceback: Stack trace info if an exception is raised from
                          the with-block.
        @return None, indicating not to ignore an exception from the with-block
                if raised.
        """
        logging.info('Rebooting...')
        try:
            self._host.reboot()
        except Exception:
            # When the with-block itself raised, keep that original exception:
            # only log the (secondary) reboot failure. Otherwise propagate the
            # reboot failure itself.
            if exc_type is None:
                raise
            logging.exception('Rebooting failed.')
109
@contextlib.contextmanager
def lock(filename):
    """Prevents other autotest/tradefed instances from accessing cache."""
    filelock = lockfile.FileLock(filename)
    # It is tempting just to call filelock.acquire(3600). But the
    # implementation has very poor temporal granularity (timeout/10), which is
    # unsuitable for our needs. See /usr/lib64/python2.7/site-packages/lockfile/
    attempts = 0
    while not filelock.i_am_locking():
        attempts += 1
        try:
            logging.info('Waiting for cache lock...')
            filelock.acquire(random.randint(1, 5))
        except (lockfile.AlreadyLocked, lockfile.LockTimeout):
            # Normally we should acquire the lock in a few seconds. Once we
            # wait on the order of hours either the dev server IO is
            # overloaded or a lock didn't get cleaned up. Take one for the
            # team, break the lock and report a failure. This should fix
            # the lock for following tests. If the failure affects more than
            # one job look for a deadlock or dev server overload.
            if attempts > 1000:
                logging.error('Permanent lock failure. Trying to break lock.')
                filelock.break_lock()
                raise error.TestFail('Error: permanent cache lock failure.')
        else:
            logging.info('Acquired cache lock after %d attempts.', attempts)
    try:
        yield
    finally:
        filelock.release()
        logging.info('Released cache lock.')
141
142
class TradefedTest(test.test):
    """Base class to prepare DUT to run tests via tradefed."""
    # Autotest test interface version of this server-side test.
    version = 1
146
    def initialize(self, host=None):
        """Sets up the tools and binary bundles for the test.

        @param host: DUT host object this test runs against.
        """
        logging.info('Hostname: %s', host.hostname)
        self._host = host
        self._install_paths = []
        # Tests in the lab run within individual lxc container instances.
        if utils.is_in_container():
            cache_root = _TRADEFED_CACHE_CONTAINER
        else:
            cache_root = _TRADEFED_CACHE_LOCAL
        # Quick sanity check and spew of java version installed on the server.
        utils.run('java', args=('-version',), ignore_status=False, verbose=True,
                  stdout_tee=utils.TEE_TO_LOGS, stderr_tee=utils.TEE_TO_LOGS)
        # The content of the cache survives across jobs.
        self._safe_makedirs(cache_root)
        self._tradefed_cache = os.path.join(cache_root, 'cache')
        self._tradefed_cache_lock = os.path.join(cache_root, 'lock')
        # The content of the install location does not survive across jobs and
        # is isolated (by using a unique path) against other autotest
        # instances. This is not needed for the lab, but if somebody wants to
        # run multiple TradefedTest instances.
        self._tradefed_install = tempfile.mkdtemp(prefix=_TRADEFED_PREFIX)
        # Under lxc the cache is shared between multiple autotest/tradefed
        # instances. We need to synchronize access to it. All binaries are
        # installed through the (shared) cache into the local (unshared)
        # lxc/autotest instance storage.
        # If clearing the cache it must happen before all downloads.
        self._clear_download_cache_if_needed()
        # Set permissions (rwxr-xr-x) to the executable binaries.
        permission = (stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH
                      | stat.S_IXOTH)
        self._install_files(_ADB_DIR, _ADB_FILES, permission)
        self._install_files(_SDK_TOOLS_DIR, _SDK_TOOLS_FILES, permission)
180
181 def cleanup(self):
182 """Cleans up any dirtied state."""
183 # Kill any lingering adb servers.
184 self._run('adb', verbose=True, args=('kill-server',))
185 logging.info('Cleaning up %s.', self._tradefed_install)
186 shutil.rmtree(self._tradefed_install)
187
188 def _login_chrome(self):
189 """Returns Chrome log-in context manager.
190
191 Please see also cheets_CTSHelper for details about how this works.
192 """
193 return _ChromeLogin(self._host)
194
195 def _try_adb_connect(self):
196 """Attempts to connect to adb on the DUT.
197
198 @return boolean indicating if adb connected successfully.
199 """
200 # This may fail return failure due to a race condition in adb connect
201 # (b/29370989). If adb is already connected, this command will
202 # immediately return success.
203 hostport = '{}:{}'.format(self._host.hostname, self._host.port)
204 result = self._run(
205 'adb',
206 args=('connect', hostport),
207 verbose=True,
208 ignore_status=True)
209 logging.info('adb connect {}:\n{}'.format(hostport, result.stdout))
210 if result.exit_status != 0:
211 return False
212
213 result = self._run('adb', args=('devices',))
214 logging.info('adb devices:\n' + result.stdout)
215 if not re.search(
216 r'{}\s+(device|unauthorized)'.format(re.escape(hostport)),
217 result.stdout):
218 return False
219
220 # Actually test the connection with an adb command as there can be
221 # a race between detecting the connected device and actually being
222 # able to run a commmand with authenticated adb.
223 result = self._run('adb', args=('shell', 'exit'), ignore_status=True)
224 return result.exit_status == 0
225
226 def _android_shell(self, command):
227 """Run a command remotely on the device in an android shell
228
229 This function is strictly for internal use only, as commands do not run
230 in a fully consistent Android environment. Prefer adb shell instead.
231 """
232 self._host.run('android-sh -c ' + pipes.quote(command))
233
234 def _write_android_file(self, filename, data):
235 """Writes a file to a location relative to the android container.
236
237 This is an internal function used to bootstrap adb.
238 Tests should use adb push to write files.
239 """
240 android_cmd = 'echo %s > %s' % (pipes.quote(data),
241 pipes.quote(filename))
242 self._android_shell(android_cmd)
243
    def _connect_adb(self):
        """Sets up ADB connection to the ARC container.

        Generates an adb key pair, installs the public key into the Android
        container, points the local adb client at the private key, restarts
        adbd and polls until an authenticated adb connection works.
        """
        logging.info('Setting up adb connection.')
        # Generate and push keys for adb.
        # TODO(elijahtaylor): Extract this code to arc_common and de-duplicate
        # code in arc.py on the client side tests.
        key_path = os.path.join(self.tmpdir, 'test_key')
        pubkey_path = key_path + '.pub'
        self._run('adb', verbose=True, args=('keygen', pipes.quote(key_path)))
        with open(pubkey_path, 'r') as f:
            self._write_android_file(_ANDROID_ADB_KEYS_PATH, f.read())
        # NOTE(review): presumably restorecon restores the SELinux context so
        # adbd can read the pushed keys — confirm.
        self._android_shell('restorecon ' + pipes.quote(_ANDROID_ADB_KEYS_PATH))
        os.environ['ADB_VENDOR_KEYS'] = key_path

        # Kill existing adb server to ensure that the env var is picked up.
        self._run('adb', verbose=True, args=('kill-server',))

        # This starts adbd.
        self._android_shell('setprop sys.usb.config mtp,adb')

        # adbd may take some time to come up. Repeatedly try to connect to adb.
        utils.poll_for_condition(lambda: self._try_adb_connect(),
                                 exception=error.TestFail(
                                     'Error: Failed to set up adb connection'),
                                 timeout=_ADB_READY_TIMEOUT_SECONDS,
                                 sleep_interval=_ADB_POLLING_INTERVAL_SECONDS)

        logging.info('Successfully setup adb connection.')
272
273 def _wait_for_arc_boot(self):
274 """Wait until ARC is fully booted.
275
276 Tests for the presence of the intent helper app to determine whether ARC
277 has finished booting.
278 """
279 def intent_helper_running():
Kazuhiro Inabaf2c47052017-01-26 09:18:51 +0900280 result = self._run('adb', args=('shell', 'pgrep', '-f',
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700281 'org.chromium.arc.intent_helper'))
282 return bool(result.stdout)
283 utils.poll_for_condition(
284 intent_helper_running,
Ilja H. Friedel6d5ca8f2016-10-26 22:35:36 -0700285 exception=error.TestFail(
286 'Error: Timed out waiting for intent helper.'),
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700287 timeout=_ARC_READY_TIMEOUT_SECONDS,
288 sleep_interval=_ARC_POLLING_INTERVAL_SECONDS)
289
290 def _disable_adb_install_dialog(self):
291 """Disables a dialog shown on adb install execution.
292
293 By default, on adb install execution, "Allow Google to regularly check
294 device activity ... " dialog is shown. It requires manual user action
295 so that tests are blocked at the point.
296 This method disables it.
297 """
298 logging.info('Disabling the adb install dialog.')
299 result = self._run(
300 'adb',
301 verbose=True,
302 args=(
303 'shell',
304 'settings',
305 'put',
306 'global',
307 'verifier_verify_adb_installs',
308 '0'))
309 logging.info('Disable adb dialog: %s', result.stdout)
310
311 def _ready_arc(self):
312 """Ready ARC and adb for running tests via tradefed."""
313 self._connect_adb()
314 self._disable_adb_install_dialog()
315 self._wait_for_arc_boot()
316
317 def _safe_makedirs(self, path):
318 """Creates a directory at |path| and its ancestors.
319
320 Unlike os.makedirs(), ignore errors even if directories exist.
321 """
322 try:
323 os.makedirs(path)
324 except OSError as e:
325 if not (e.errno == errno.EEXIST and os.path.isdir(path)):
326 raise
327
328 def _unzip(self, filename):
329 """Unzip the file.
330
331 The destination directory name will be the stem of filename.
332 E.g., _unzip('foo/bar/baz.zip') will create directory at
333 'foo/bar/baz', and then will inflate zip's content under the directory.
334 If here is already a directory at the stem, that directory will be used.
335
336 @param filename: Path to the zip archive.
337 @return Path to the inflated directory.
338 """
339 destination = os.path.splitext(filename)[0]
340 if os.path.isdir(destination):
341 return destination
342 self._safe_makedirs(destination)
343 utils.run('unzip', args=('-d', destination, filename))
344 return destination
345
346 def _dir_size(self, directory):
347 """Compute recursive size in bytes of directory."""
348 size = 0
349 for root, _, files in os.walk(directory):
350 size += sum(os.path.getsize(os.path.join(root, name))
351 for name in files)
352 return size
353
354 def _clear_download_cache_if_needed(self):
355 """Invalidates cache to prevent it from growing too large."""
356 # If the cache is large enough to hold a working set, we can simply
357 # delete everything without thrashing.
358 # TODO(ihf): Investigate strategies like LRU.
359 with lock(self._tradefed_cache_lock):
360 size = self._dir_size(self._tradefed_cache)
361 if size > _TRADEFED_CACHE_MAX_SIZE:
362 logging.info('Current cache size=%d got too large. Clearing %s.'
363 , size, self._tradefed_cache)
364 shutil.rmtree(self._tradefed_cache)
365 self._safe_makedirs(self._tradefed_cache)
366 else:
367 logging.info('Current cache size=%d of %s.', size,
368 self._tradefed_cache)
369
    def _download_to_cache(self, uri):
        """Downloads the uri from the storage server.

        It always checks the cache for available binaries first and skips
        download if binaries are already in cache.

        The caller of this function is responsible for holding the cache lock.

        Three download paths exist: wget for http(s) uris, gsutil for gs://
        uris outside moblab, and devserver staging + wget inside moblab.

        @param uri: The Google Storage or dl.google.com uri.
        @return Path to the downloaded object, name.
        @raise error.TestFail: If the uri scheme is not gs/http/https.
        """
        # Split uri into 3 pieces for use by gsutil and also by wget.
        parsed = urlparse.urlparse(uri)
        filename = os.path.basename(parsed.path)
        # We are hashing the uri instead of the binary. This is acceptable, as
        # the uris are supposed to contain version information and an object is
        # not supposed to be changed once created.
        output_dir = os.path.join(self._tradefed_cache,
                                  hashlib.md5(uri).hexdigest())
        output = os.path.join(output_dir, filename)
        # Check for existence of file.
        if os.path.exists(output):
            logging.info('Skipping download of %s, reusing %s.', uri, output)
            return output
        self._safe_makedirs(output_dir)

        if parsed.scheme not in ['gs', 'http', 'https']:
            raise error.TestFail('Error: Unknown download scheme %s' %
                                 parsed.scheme)
        if parsed.scheme in ['http', 'https']:
            logging.info('Using wget to download %s to %s.', uri, output_dir)
            # We are downloading 1 file at a time, hence using -O over -P.
            # We also limit the rate to 20MBytes/s
            utils.run(
                'wget',
                args=(
                    '--report-speed=bits',
                    '--limit-rate=20M',
                    '-O',
                    output,
                    uri),
                verbose=True)
            return output

        if not client_utils.is_moblab():
            # If the machine can access to the storage server directly,
            # defer to "gsutil" for downloading.
            logging.info('Host %s not in lab. Downloading %s directly to %s.',
                         self._host.hostname, uri, output)
            # b/17445576: gsutil rsync of individual files is not implemented.
            utils.run('gsutil', args=('cp', uri, output), verbose=True)
            return output

        # We are in the moblab. Because the machine cannot access the storage
        # server directly, use dev server to proxy.
        logging.info('Host %s is in lab. Downloading %s by staging to %s.',
                     self._host.hostname, uri, output)

        dirname = os.path.dirname(parsed.path)
        archive_url = '%s://%s%s' % (parsed.scheme, parsed.netloc, dirname)

        # First, request the devserver to download files into the lab network.
        # TODO(ihf): Switch stage_artifacts to honor rsync. Then we don't have
        # to shuffle files inside of tarballs.
        build = afe_utils.get_build(self._host)
        ds = dev_server.ImageServer.resolve(build)
        ds.stage_artifacts(build, files=[filename], archive_url=archive_url)

        # Then download files from the dev server.
        # TODO(ihf): use rsync instead of wget. Are there 3 machines involved?
        # Itself, dev_server plus DUT? Or is there just no rsync in moblab?
        ds_src = '/'.join([ds.url(), 'static', dirname, filename])
        logging.info('dev_server URL: %s', ds_src)
        # Calls into DUT to pull uri from dev_server.
        utils.run(
            'wget',
            args=(
                '--report-speed=bits',
                '--limit-rate=20M',
                '-O',
                output,
                ds_src),
            verbose=True)
        return output
454
455 def _instance_copy(self, cache_path):
456 """Makes a copy of a file from the (shared) cache to a wholy owned
457 local instance. Also copies one level of cache directoy (MD5 named).
458 """
459 filename = os.path.basename(cache_path)
460 dirname = os.path.basename(os.path.dirname(cache_path))
461 instance_dir = os.path.join(self._tradefed_install, dirname)
462 # Make sure destination directory is named the same.
463 self._safe_makedirs(instance_dir)
464 instance_path = os.path.join(instance_dir, filename)
465 shutil.copyfile(cache_path, instance_path)
466 return instance_path
467
468 def _install_bundle(self, gs_uri):
469 """Downloads a zip file, installs it and returns the local path."""
470 if not gs_uri.endswith('.zip'):
Ilja H. Friedel6d5ca8f2016-10-26 22:35:36 -0700471 raise error.TestFail('Error: Not a .zip file %s.', gs_uri)
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700472 # Atomic write through of file.
473 with lock(self._tradefed_cache_lock):
474 cache_path = self._download_to_cache(gs_uri)
475 local = self._instance_copy(cache_path)
David Haddockb9a362b2016-10-28 16:19:12 -0700476
477 unzipped = self._unzip(local)
478 self._abi = 'x86' if 'x86-x86' in unzipped else 'arm'
479 return unzipped
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700480
481 def _install_files(self, gs_dir, files, permission):
482 """Installs binary tools."""
483 for filename in files:
484 gs_uri = os.path.join(gs_dir, filename)
485 # Atomic write through of file.
486 with lock(self._tradefed_cache_lock):
487 cache_path = self._download_to_cache(gs_uri)
488 local = self._instance_copy(cache_path)
489 os.chmod(local, permission)
490 # Keep track of PATH.
491 self._install_paths.append(os.path.dirname(local))
492
493 def _run(self, *args, **kwargs):
494 """Executes the given command line.
495
496 To support SDK tools, such as adb or aapt, this adds _install_paths
497 to the extra_paths. Before invoking this, ensure _install_files() has
498 been called.
499 """
500 kwargs['extra_paths'] = (
501 kwargs.get('extra_paths', []) + self._install_paths)
502 return utils.run(*args, **kwargs)
503
504 def _parse_tradefed_datetime(self, result, summary=None):
505 """Get the tradefed provided result ID consisting of a datetime stamp.
506
507 Unfortunately we are unable to tell tradefed where to store the results.
508 In the lab we have multiple instances of tradefed running in parallel
509 writing results and logs to the same base directory. This function
510 finds the identifier which tradefed used during the current run and
511 returns it for further processing of result files.
512
513 @param result: The result object from utils.run.
514 @param summary: Test result summary from runs so far.
515 @return datetime_id: The result ID chosen by tradefed.
516 Example: '2016.07.14_00.34.50'.
517 """
518 # This string is show for both 'run' and 'continue' after all tests.
519 match = re.search(r': XML test result file generated at (\S+). Passed',
520 result.stdout)
521 if not (match and match.group(1)):
522 # TODO(ihf): Find out if we ever recover something interesting in
523 # this case. Otherwise delete it.
524 # Try harder to find the remains. This string shows before all
525 # tests but only with 'run', not 'continue'.
526 logging.warning('XML test result file incomplete?')
527 match = re.search(r': Created result dir (\S+)', result.stdout)
528 if not (match and match.group(1)):
529 error_msg = 'Test did not complete due to Chrome or ARC crash.'
530 if summary:
531 error_msg += (' Test summary from previous runs: %s'
532 % summary)
Ilja H. Friedel6d5ca8f2016-10-26 22:35:36 -0700533 raise error.TestFail(error_msg)
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700534 datetime_id = match.group(1)
535 logging.info('Tradefed identified results and logs with %s.',
536 datetime_id)
537 return datetime_id
538
Rohit Makasana99116d32016-10-17 19:32:04 -0700539 def _parse_result(self, result, waivers=None):
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700540 """Check the result from the tradefed output.
541
542 This extracts the test pass/fail/executed list from the output of
543 tradefed. It is up to the caller to handle inconsistencies.
544
545 @param result: The result object from utils.run.
Rohit Makasana99116d32016-10-17 19:32:04 -0700546 @param waivers: a set() of tests which are permitted to fail.
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700547 """
548 # Parse the stdout to extract test status. In particular step over
549 # similar output for each ABI and just look at the final summary.
550 match = re.search(r'(XML test result file generated at (\S+). '
551 r'Passed (\d+), Failed (\d+), Not Executed (\d+))',
552 result.stdout)
553 if not match:
554 raise error.Test('Test log does not contain a summary.')
555
556 passed = int(match.group(3))
557 failed = int(match.group(4))
558 not_executed = int(match.group(5))
559 match = re.search(r'(Start test run of (\d+) packages, containing '
560 r'(\d+(?:,\d+)?) tests)', result.stdout)
561 if match and match.group(3):
562 tests = int(match.group(3).replace(',', ''))
563 else:
564 # Unfortunately this happens. Assume it made no other mistakes.
565 logging.warning('Tradefed forgot to print number of tests.')
566 tests = passed + failed + not_executed
Rohit Makasana99116d32016-10-17 19:32:04 -0700567 # TODO(rohitbm): make failure parsing more robust by extracting the list
568 # of failing tests instead of searching in the result blob. As well as
569 # only parse for waivers for the running ABI.
570 if waivers:
571 for testname in waivers:
David Haddock16712332016-11-03 14:35:23 -0700572 # TODO(dhaddock): Find a more robust way to apply waivers.
573 fail_count = result.stdout.count(testname + ' FAIL')
574 if fail_count:
575 if fail_count > 2:
576 raise error.TestFail('Error: There are too many '
577 'failures found in the output to '
578 'be valid for applying waivers. '
579 'Please check output.')
580 failed -= fail_count
Rohit Makasana99116d32016-10-17 19:32:04 -0700581 # To maintain total count consistency.
David Haddock16712332016-11-03 14:35:23 -0700582 passed += fail_count
583 logging.info('Waived failure for %s %d time(s)',
584 testname, fail_count)
Rohit Makasana99116d32016-10-17 19:32:04 -0700585 logging.info('tests=%d, passed=%d, failed=%d, not_executed=%d',
586 tests, passed, failed, not_executed)
David Haddock16712332016-11-03 14:35:23 -0700587 if failed < 0:
588 raise error.TestFail('Error: Internal waiver book keeping has '
589 'become inconsistent.')
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700590 return (tests, passed, failed, not_executed)
591
592 def _collect_logs(self, repository, datetime, destination):
593 """Collects the tradefed logs.
594
595 It is legal to collect the same logs multiple times. This is normal
596 after 'tradefed continue' updates existing logs with new results.
597
598 @param repository: Full path to tradefeds output on disk.
599 @param datetime: The identifier which tradefed assigned to the run.
600 Currently this looks like '2016.07.14_00.34.50'.
601 @param destination: Autotest result directory (destination of logs).
602 """
603 logging.info('Collecting tradefed testResult.xml and logs to %s.',
604 destination)
605 repository_results = os.path.join(repository, 'results')
606 repository_logs = os.path.join(repository, 'logs')
607 # Because other tools rely on the currently chosen Google storage paths
608 # we need to keep destination_results in
609 # cheets_CTS.*/results/android-cts/2016.mm.dd_hh.mm.ss(/|.zip)
610 # and destination_logs in
611 # cheets_CTS.*/results/android-cts/logs/2016.mm.dd_hh.mm.ss/
612 destination_results = destination
Ilja H. Friedelb83646b2016-10-18 13:02:59 -0700613 destination_results_datetime = os.path.join(destination_results,
614 datetime)
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700615 destination_results_datetime_zip = destination_results_datetime + '.zip'
616 destination_logs = os.path.join(destination, 'logs')
617 destination_logs_datetime = os.path.join(destination_logs, datetime)
618 # We may have collected the same logs before, clean old versions.
619 if os.path.exists(destination_results_datetime_zip):
620 os.remove(destination_results_datetime_zip)
621 if os.path.exists(destination_results_datetime):
622 shutil.rmtree(destination_results_datetime)
623 if os.path.exists(destination_logs_datetime):
624 shutil.rmtree(destination_logs_datetime)
625 shutil.copytree(
626 os.path.join(repository_results, datetime),
627 destination_results_datetime)
628 # Copying the zip file has to happen after the tree so the destination
629 # directory is available.
630 shutil.copy(
631 os.path.join(repository_results, datetime) + '.zip',
632 destination_results_datetime_zip)
633 shutil.copytree(
634 os.path.join(repository_logs, datetime),
635 destination_logs_datetime)
David Haddockb9a362b2016-10-28 16:19:12 -0700636
Rohit Makasana77566902016-11-01 15:34:27 -0700637 def _get_expected_failures(self, directory):
638 """Return a list of expected failures.
David Haddockb9a362b2016-10-28 16:19:12 -0700639
Rohit Makasana77566902016-11-01 15:34:27 -0700640 @return: a list of expected failures.
David Haddockb9a362b2016-10-28 16:19:12 -0700641 """
Rohit Makasana77566902016-11-01 15:34:27 -0700642 logging.info('Loading expected failures from %s.', directory)
643 expected_fail_dir = os.path.join(self.bindir, directory)
David Haddockb9a362b2016-10-28 16:19:12 -0700644 expected_fail_files = glob.glob(expected_fail_dir + '/*.' + self._abi)
Rohit Makasana77566902016-11-01 15:34:27 -0700645 expected_failures = set()
David Haddockb9a362b2016-10-28 16:19:12 -0700646 for expected_fail_file in expected_fail_files:
647 try:
648 file_path = os.path.join(expected_fail_dir, expected_fail_file)
649 with open(file_path) as f:
650 lines = set(f.read().splitlines())
651 logging.info('Loaded %d expected failures from %s',
652 len(lines), expected_fail_file)
Rohit Makasana77566902016-11-01 15:34:27 -0700653 expected_failures |= lines
David Haddockb9a362b2016-10-28 16:19:12 -0700654 except IOError as e:
655 logging.error('Error loading %s (%s).', file_path, e.strerror)
Rohit Makasana77566902016-11-01 15:34:27 -0700656 logging.info('Finished loading expected failures: %s', expected_failures)
657 return expected_failures