# Copyright 2016 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# repohooks/pre-upload.py currently does not run pylint. But for developers who
# want to check their code manually we disable several harmless pylint warnings
# which just distract from more serious remaining issues.
#
# The instance variables _host and _install_paths are not defined in __init__().
# pylint: disable=attribute-defined-outside-init
#
# Many short variable names don't follow the naming convention.
# pylint: disable=invalid-name
#
# _parse_result() and _dir_size() don't access self and could be functions.
# pylint: disable=no-self-use
#
# _ChromeLogin and _TradefedLogCollector have no public methods.
# pylint: disable=too-few-public-methods

import contextlib
import errno
import glob
import hashlib
import logging
import os
import pipes
import random
import re
import shutil
import stat
import tempfile
import urlparse

from autotest_lib.client.bin import utils as client_utils
from autotest_lib.client.common_lib import error
from autotest_lib.client.common_lib.cros import dev_server
from autotest_lib.server import afe_utils
from autotest_lib.server import autotest
from autotest_lib.server import test
from autotest_lib.server import utils
from autotest_lib.site_utils import lxc

try:
    import lockfile
except ImportError:
    if utils.is_in_container():
        # Ensure the container has the required packages installed.
        lxc.install_packages(python_packages=['lockfile'])
        import lockfile
    else:
        raise


_SDK_TOOLS_DIR = ('gs://chromeos-arc-images/builds/'
                  'git_mnc-dr-arc-dev-linux-static_sdk_tools/3554341')
_SDK_TOOLS_FILES = ['aapt']
# To stabilize adb behavior, we use dynamically linked adb.
_ADB_DIR = ('gs://chromeos-arc-images/builds/'
            'git_mnc-dr-arc-dev-linux-cheets_arm-user/3554341')
_ADB_FILES = ['adb']

_ADB_POLLING_INTERVAL_SECONDS = 1
_ADB_READY_TIMEOUT_SECONDS = 60
_ANDROID_ADB_KEYS_PATH = '/data/misc/adb/adb_keys'

_ARC_POLLING_INTERVAL_SECONDS = 1
_ARC_READY_TIMEOUT_SECONDS = 60

_TRADEFED_PREFIX = 'autotest-tradefed-install_'
_TRADEFED_CACHE_LOCAL = '/tmp/autotest-tradefed-cache'
_TRADEFED_CACHE_CONTAINER = '/usr/local/autotest/results/shared/cache'
_TRADEFED_CACHE_CONTAINER_LOCK = '/usr/local/autotest/results/shared/lock'

# According to dshi a drone has 500GB of disk space. It is ok for now to use
# 10GB of disk space, as no more than 10 tests should run in parallel.
# TODO(ihf): Investigate tighter cache size.
_TRADEFED_CACHE_MAX_SIZE = (10 * 1024 * 1024 * 1024)


class _ChromeLogin(object):
    """Context manager to handle Chrome login state."""

    def __init__(self, host):
        self._host = host

    def __enter__(self):
        """Logs in to Chrome."""
        logging.info('Ensure Android is running...')
        autotest.Autotest(self._host).run_test('cheets_CTSHelper',
                                               check_client_result=True)

    def __exit__(self, exc_type, exc_value, traceback):
        """On exit, reboots the machine to wipe out all the login state.

        @param exc_type: Exception type if an exception is raised from the
                         with-block.
        @param exc_value: Exception instance if an exception is raised from
                          the with-block.
        @param traceback: Stack trace info if an exception is raised from
                          the with-block.
        @return None, indicating not to ignore an exception from the with-block
                if raised.
        """
        logging.info('Rebooting...')
        try:
            self._host.reboot()
        except Exception:
            if exc_type is None:
                raise
            # If an exception was raised from the with-block, just log the
            # reboot failure so the original exception is not masked.
            logging.exception('Rebooting failed.')


@contextlib.contextmanager
def lock(filename):
    """Prevents other autotest/tradefed instances from accessing cache."""
    filelock = lockfile.FileLock(filename)
    # It is tempting just to call filelock.acquire(3600). But the implementation
    # has very poor temporal granularity (timeout/10), which is unsuitable for
    # our needs. See /usr/lib64/python2.7/site-packages/lockfile/
    attempts = 0
    while not filelock.i_am_locking():
        try:
            attempts += 1
            logging.info('Waiting for cache lock...')
            filelock.acquire(random.randint(1, 5))
        except (lockfile.AlreadyLocked, lockfile.LockTimeout):
            if attempts > 1000:
                # Normally we should acquire the lock in a few seconds. Once we
                # wait on the order of hours either the dev server IO is
                # overloaded or a lock didn't get cleaned up. Take one for the
                # team, break the lock and report a failure. This should fix
                # the lock for following tests. If the failure affects more than
                # one job look for a deadlock or dev server overload.
                logging.error('Permanent lock failure. Trying to break lock.')
                filelock.break_lock()
                raise error.TestFail('Error: permanent cache lock failure.')
        else:
            logging.info('Acquired cache lock after %d attempts.', attempts)
    try:
        yield
    finally:
        filelock.release()
        logging.info('Released cache lock.')


class TradefedTest(test.test):
    """Base class to prepare DUT to run tests via tradefed."""
    version = 1

    def initialize(self, host=None):
        """Sets up the tools and binary bundles for the test."""
        logging.info('Hostname: %s', host.hostname)
        self._host = host
        self._install_paths = []
        # Tests in the lab run within individual lxc container instances.
        if utils.is_in_container():
            # Ensure the container has the required packages installed.
            lxc.install_packages(packages=['unzip', 'default-jre'])
            cache_root = _TRADEFED_CACHE_CONTAINER
        else:
            cache_root = _TRADEFED_CACHE_LOCAL
        # The content of the cache survives across jobs.
        self._safe_makedirs(cache_root)
        self._tradefed_cache = os.path.join(cache_root, 'cache')
        self._tradefed_cache_lock = os.path.join(cache_root, 'lock')
        # The content of the install location does not survive across jobs and
        # is isolated (by using a unique path) against other autotest
        # instances. This is not needed in the lab, but it matters if somebody
        # wants to run multiple TradefedTest instances on one machine.
        self._tradefed_install = tempfile.mkdtemp(prefix=_TRADEFED_PREFIX)
        # Under lxc the cache is shared between multiple autotest/tradefed
        # instances. We need to synchronize access to it. All binaries are
        # installed through the (shared) cache into the local (unshared)
        # lxc/autotest instance storage.
        # If the cache needs clearing, it must happen before any downloads.
        self._clear_download_cache_if_needed()
        # Set permissions (rwxr-xr-x) on the executable binaries.
        permission = (stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH
                      | stat.S_IXOTH)
        self._install_files(_ADB_DIR, _ADB_FILES, permission)
        self._install_files(_SDK_TOOLS_DIR, _SDK_TOOLS_FILES, permission)

    def cleanup(self):
        """Cleans up any dirtied state."""
        # Kill any lingering adb servers.
        self._run('adb', verbose=True, args=('kill-server',))
        logging.info('Cleaning up %s.', self._tradefed_install)
        shutil.rmtree(self._tradefed_install)

    def _login_chrome(self):
        """Returns a Chrome log-in context manager.

        Please see also cheets_CTSHelper for details about how this works.
        """
        return _ChromeLogin(self._host)

    def _try_adb_connect(self):
        """Attempts to connect to adb on the DUT.

        @return boolean indicating if adb connected successfully.
        """
        # This may return failure due to a race condition in adb connect
        # (b/29370989). If adb is already connected, this command will
        # immediately return success.
        hostport = '{}:{}'.format(self._host.hostname, self._host.port)
        result = self._run(
            'adb',
            args=('connect', hostport),
            verbose=True,
            ignore_status=True)
        logging.info('adb connect %s:\n%s', hostport, result.stdout)
        if result.exit_status != 0:
            return False

        result = self._run('adb', args=('devices',))
        logging.info('adb devices:\n%s', result.stdout)
        if not re.search(
                r'{}\s+(device|unauthorized)'.format(re.escape(hostport)),
                result.stdout):
            return False

        # Actually test the connection with an adb command as there can be
        # a race between detecting the connected device and actually being
        # able to run a command with authenticated adb.
        result = self._run('adb', args=('shell', 'exit'), ignore_status=True)
        return result.exit_status == 0

    def _android_shell(self, command):
        """Runs a command remotely on the device in an android shell.

        This function is strictly for internal use only, as commands do not run
        in a fully consistent Android environment. Prefer adb shell instead.
        """
        self._host.run('android-sh -c ' + pipes.quote(command))

    def _write_android_file(self, filename, data):
        """Writes a file to a location relative to the android container.

        This is an internal function used to bootstrap adb.
        Tests should use adb push to write files.
        """
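        # Note: both the data and the target path are shell-quoted, and
        # 'echo' appends a trailing newline to the written data.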
        android_cmd = 'echo %s > %s' % (pipes.quote(data),
                                        pipes.quote(filename))
        self._android_shell(android_cmd)

    def _connect_adb(self):
        """Sets up ADB connection to the ARC container."""
        logging.info('Setting up adb connection.')
        # Generate and push keys for adb.
        # TODO(elijahtaylor): Extract this code to arc_common and de-duplicate
        # code in arc.py on the client side tests.
        key_path = os.path.join(self.tmpdir, 'test_key')
        pubkey_path = key_path + '.pub'
        self._run('adb', verbose=True, args=('keygen', pipes.quote(key_path)))
        with open(pubkey_path, 'r') as f:
            self._write_android_file(_ANDROID_ADB_KEYS_PATH, f.read())
        self._android_shell('restorecon ' + pipes.quote(_ANDROID_ADB_KEYS_PATH))
        os.environ['ADB_VENDOR_KEYS'] = key_path

        # Kill the existing adb server to ensure that the env var is picked up.
        self._run('adb', verbose=True, args=('kill-server',))

        # This starts adbd.
        self._android_shell('setprop sys.usb.config mtp,adb')

        # adbd may take some time to come up. Repeatedly try to connect to adb.
        utils.poll_for_condition(lambda: self._try_adb_connect(),
                                 exception=error.TestFail(
                                     'Error: Failed to set up adb connection'),
                                 timeout=_ADB_READY_TIMEOUT_SECONDS,
                                 sleep_interval=_ADB_POLLING_INTERVAL_SECONDS)

        logging.info('Successfully set up adb connection.')

    def _wait_for_arc_boot(self):
        """Waits until ARC is fully booted.

        Tests for the presence of the intent helper app to determine whether
        ARC has finished booting.
        """
        def intent_helper_running():
            result = self._run('adb', args=('shell', 'pgrep',
                                            'org.chromium.arc.intent_helper'))
            return bool(result.stdout)
        utils.poll_for_condition(
            intent_helper_running,
            exception=error.TestFail(
                'Error: Timed out waiting for intent helper.'),
            timeout=_ARC_READY_TIMEOUT_SECONDS,
            sleep_interval=_ARC_POLLING_INTERVAL_SECONDS)

    def _disable_adb_install_dialog(self):
        """Disables a dialog shown on adb install execution.

        By default, the "Allow Google to regularly check device activity ..."
        dialog is shown on adb install execution. It requires manual user
        action, so tests would be blocked at that point. This method disables
        it.
        """
        logging.info('Disabling the adb install dialog.')
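        # Setting the global 'verifier_verify_adb_installs' to 0 turns off
        # package verification for adb installs, which is what triggers the
        # dialog.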
        result = self._run(
            'adb',
            verbose=True,
            args=(
                'shell',
                'settings',
                'put',
                'global',
                'verifier_verify_adb_installs',
                '0'))
        logging.info('Disable adb dialog: %s', result.stdout)

    def _ready_arc(self):
        """Readies ARC and adb for running tests via tradefed."""
        self._connect_adb()
        self._disable_adb_install_dialog()
        self._wait_for_arc_boot()

    def _safe_makedirs(self, path):
        """Creates a directory at |path| and its ancestors.

        Unlike os.makedirs(), this ignores the error when the directories
        already exist.
        """
        try:
            os.makedirs(path)
        except OSError as e:
            if not (e.errno == errno.EEXIST and os.path.isdir(path)):
                raise

    def _unzip(self, filename):
        """Unzips the file.

        The destination directory name will be the stem of filename.
        E.g., _unzip('foo/bar/baz.zip') will create the directory
        'foo/bar/baz' and inflate the zip's content under it.
        If there is already a directory at the stem, that directory is used.

        @param filename: Path to the zip archive.
        @return Path to the inflated directory.
        """
        destination = os.path.splitext(filename)[0]
        if os.path.isdir(destination):
            return destination
        self._safe_makedirs(destination)
        utils.run('unzip', args=('-d', destination, filename))
        return destination

    def _dir_size(self, directory):
        """Computes recursive size in bytes of directory."""
        size = 0
        for root, _, files in os.walk(directory):
            size += sum(os.path.getsize(os.path.join(root, name))
                        for name in files)
        return size

    def _clear_download_cache_if_needed(self):
        """Invalidates cache to prevent it from growing too large."""
        # If the cache is large enough to hold a working set, we can simply
        # delete everything without thrashing.
        # TODO(ihf): Investigate strategies like LRU.
        with lock(self._tradefed_cache_lock):
            size = self._dir_size(self._tradefed_cache)
            if size > _TRADEFED_CACHE_MAX_SIZE:
                logging.info('Current cache size=%d got too large. Clearing %s.',
                             size, self._tradefed_cache)
                shutil.rmtree(self._tradefed_cache)
                self._safe_makedirs(self._tradefed_cache)
            else:
                logging.info('Current cache size=%d of %s.', size,
                             self._tradefed_cache)

    def _download_to_cache(self, uri):
        """Downloads the uri from the storage server.

        It always checks the cache for available binaries first and skips
        download if binaries are already in the cache.

        The caller of this function is responsible for holding the cache lock.

        @param uri: The Google Storage or dl.google.com uri.
        @return Path to the downloaded object.
        """
        # Split uri into 3 pieces for use by gsutil and also by wget.
        parsed = urlparse.urlparse(uri)
        filename = os.path.basename(parsed.path)
        # We are hashing the uri instead of the binary. This is acceptable, as
        # the uris are supposed to contain version information and an object is
        # not supposed to be changed once created.
        output_dir = os.path.join(self._tradefed_cache,
                                  hashlib.md5(uri).hexdigest())
        output = os.path.join(output_dir, filename)
        # Check for existence of the file.
        if os.path.exists(output):
            logging.info('Skipping download of %s, reusing %s.', uri, output)
            return output
        self._safe_makedirs(output_dir)

        if parsed.scheme not in ['gs', 'http', 'https']:
            raise error.TestFail('Error: Unknown download scheme %s' %
                                 parsed.scheme)
        if parsed.scheme in ['http', 'https']:
            logging.info('Using wget to download %s to %s.', uri, output_dir)
            # We are downloading 1 file at a time, hence using -O over -P.
            # We also limit the rate to 20MBytes/s.
            utils.run(
                'wget',
                args=(
                    '--report-speed=bits',
                    '--limit-rate=20M',
                    '-O',
                    output,
                    uri),
                verbose=True)
            return output

        if not client_utils.is_moblab():
            # If the machine can access the storage server directly,
            # defer to "gsutil" for downloading.
            logging.info('Host %s not in lab. Downloading %s directly to %s.',
                         self._host.hostname, uri, output)
            # b/17445576: gsutil rsync of individual files is not implemented.
            utils.run('gsutil', args=('cp', uri, output), verbose=True)
            return output

        # We are in the moblab. Because the machine cannot access the storage
        # server directly, use the dev server as a proxy.
        logging.info('Host %s is in lab. Downloading %s by staging to %s.',
                     self._host.hostname, uri, output)

        dirname = os.path.dirname(parsed.path)
        archive_url = '%s://%s%s' % (parsed.scheme, parsed.netloc, dirname)

        # First, request the devserver to download files into the lab network.
        # TODO(ihf): Switch stage_artifacts to honor rsync. Then we don't have
        # to shuffle files inside of tarballs.
        build = afe_utils.get_build(self._host)
        ds = dev_server.ImageServer.resolve(build)
        ds.stage_artifacts(build, files=[filename], archive_url=archive_url)

        # Then download files from the dev server.
        # TODO(ihf): use rsync instead of wget. Are there 3 machines involved?
        # Itself, dev_server plus DUT? Or is there just no rsync in moblab?
        ds_src = '/'.join([ds.url(), 'static', dirname, filename])
        logging.info('dev_server URL: %s', ds_src)
        # Calls into DUT to pull uri from dev_server.
        utils.run(
            'wget',
            args=(
                '--report-speed=bits',
                '--limit-rate=20M',
                '-O',
                output,
                ds_src),
            verbose=True)
        return output

    def _instance_copy(self, cache_path):
        """Makes a copy of a file from the (shared) cache to a wholly owned
        local instance. Also copies one level of the cache directory (MD5
        named).
        """
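        # E.g. <cache>/<md5>/bundle.zip is copied to
        # <tradefed_install>/<md5>/bundle.zip.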
        filename = os.path.basename(cache_path)
        dirname = os.path.basename(os.path.dirname(cache_path))
        instance_dir = os.path.join(self._tradefed_install, dirname)
        # Make sure the destination directory is named the same.
        self._safe_makedirs(instance_dir)
        instance_path = os.path.join(instance_dir, filename)
        shutil.copyfile(cache_path, instance_path)
        return instance_path

    def _install_bundle(self, gs_uri):
        """Downloads a zip file, installs it and returns the local path."""
        if not gs_uri.endswith('.zip'):
            raise error.TestFail('Error: Not a .zip file %s.' % gs_uri)
        # Atomic write-through of the file.
        with lock(self._tradefed_cache_lock):
            cache_path = self._download_to_cache(gs_uri)
            local = self._instance_copy(cache_path)

        unzipped = self._unzip(local)
        self._abi = 'x86' if 'x86-x86' in unzipped else 'arm'
        return unzipped

    def _install_files(self, gs_dir, files, permission):
        """Installs binary tools."""
        for filename in files:
            gs_uri = os.path.join(gs_dir, filename)
            # Atomic write-through of the file.
            with lock(self._tradefed_cache_lock):
                cache_path = self._download_to_cache(gs_uri)
                local = self._instance_copy(cache_path)
                os.chmod(local, permission)
            # Keep track of PATH.
            self._install_paths.append(os.path.dirname(local))

    def _run(self, *args, **kwargs):
        """Executes the given command line.

        To support SDK tools, such as adb or aapt, this adds _install_paths
        to the extra_paths. Before invoking this, ensure _install_files() has
        been called.
        """
        kwargs['extra_paths'] = (
            kwargs.get('extra_paths', []) + self._install_paths)
        return utils.run(*args, **kwargs)

    def _parse_tradefed_datetime(self, result, summary=None):
        """Gets the tradefed provided result ID consisting of a datetime stamp.

        Unfortunately we are unable to tell tradefed where to store the results.
        In the lab we have multiple instances of tradefed running in parallel
        writing results and logs to the same base directory. This function
        finds the identifier which tradefed used during the current run and
        returns it for further processing of result files.

        @param result: The result object from utils.run.
        @param summary: Test result summary from runs so far.
        @return datetime_id: The result ID chosen by tradefed.
                             Example: '2016.07.14_00.34.50'.
        """
        # This string is shown for both 'run' and 'continue' after all tests.
        match = re.search(r': XML test result file generated at (\S+). Passed',
                          result.stdout)
        if not (match and match.group(1)):
            # TODO(ihf): Find out if we ever recover something interesting in
            # this case. Otherwise delete it.
            # Try harder to find the remains. This string shows before all
            # tests but only with 'run', not 'continue'.
            logging.warning('XML test result file incomplete?')
            match = re.search(r': Created result dir (\S+)', result.stdout)
            if not (match and match.group(1)):
                error_msg = 'Test did not complete due to Chrome or ARC crash.'
                if summary:
                    error_msg += (' Test summary from previous runs: %s'
                                  % summary)
                raise error.TestFail(error_msg)
        datetime_id = match.group(1)
        logging.info('Tradefed identified results and logs with %s.',
                     datetime_id)
        return datetime_id

    def _parse_result(self, result, waivers=None):
        """Checks the result from the tradefed output.

        This extracts the test pass/fail/executed list from the output of
        tradefed. It is up to the caller to handle inconsistencies.

        @param result: The result object from utils.run.
        @param waivers: a set() of tests which are permitted to fail.
        """
        # Parse the stdout to extract test status. In particular step over
        # similar output for each ABI and just look at the final summary.
        match = re.search(r'(XML test result file generated at (\S+). '
                          r'Passed (\d+), Failed (\d+), Not Executed (\d+))',
                          result.stdout)
        if not match:
            raise error.TestFail('Error: Test log does not contain a summary.')

        passed = int(match.group(3))
        failed = int(match.group(4))
        not_executed = int(match.group(5))
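        # The total test count may be printed with a thousands separator,
        # e.g. '1,234', hence the (?:,\d+)? in the regex below.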
        match = re.search(r'(Start test run of (\d+) packages, containing '
                          r'(\d+(?:,\d+)?) tests)', result.stdout)
        if match and match.group(3):
            tests = int(match.group(3).replace(',', ''))
        else:
            # Unfortunately this happens. Assume it made no other mistakes.
            logging.warning('Tradefed forgot to print the number of tests.')
            tests = passed + failed + not_executed
        # TODO(rohitbm): make failure parsing more robust by extracting the
        # list of failing tests instead of searching in the result blob. Also,
        # only parse waivers for the running ABI.
        if waivers:
            for testname in waivers:
                # TODO(dhaddock): Find a more robust way to apply waivers.
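                # The same test name can legitimately show up as FAIL more
                # than once (presumably once per ABI), so only treat more
                # than two occurrences as a parsing problem.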
                fail_count = result.stdout.count(testname + ' FAIL')
                if fail_count:
                    if fail_count > 2:
                        raise error.TestFail('Error: There are too many '
                                             'failures found in the output to '
                                             'be valid for applying waivers. '
                                             'Please check the output.')
                    failed -= fail_count
                    # To maintain total count consistency.
                    passed += fail_count
                    logging.info('Waived failure for %s %d time(s).',
                                 testname, fail_count)
        logging.info('tests=%d, passed=%d, failed=%d, not_executed=%d',
                     tests, passed, failed, not_executed)
        if failed < 0:
            raise error.TestFail('Error: Internal waiver bookkeeping has '
                                 'become inconsistent.')
        return (tests, passed, failed, not_executed)

    def _collect_logs(self, repository, datetime, destination):
        """Collects the tradefed logs.

        It is legal to collect the same logs multiple times. This is normal
        after 'tradefed continue' updates existing logs with new results.

        @param repository: Full path to tradefed's output on disk.
        @param datetime: The identifier which tradefed assigned to the run.
                         Currently this looks like '2016.07.14_00.34.50'.
        @param destination: Autotest result directory (destination of logs).
        """
        logging.info('Collecting tradefed testResult.xml and logs to %s.',
                     destination)
        repository_results = os.path.join(repository, 'results')
        repository_logs = os.path.join(repository, 'logs')
        # Because other tools rely on the currently chosen Google storage paths
        # we need to keep destination_results in
        # cheets_CTS.*/results/android-cts/2016.mm.dd_hh.mm.ss(/|.zip)
        # and destination_logs in
        # cheets_CTS.*/results/android-cts/logs/2016.mm.dd_hh.mm.ss/
        destination_results = destination
        destination_results_datetime = os.path.join(destination_results,
                                                    datetime)
        destination_results_datetime_zip = destination_results_datetime + '.zip'
        destination_logs = os.path.join(destination, 'logs')
        destination_logs_datetime = os.path.join(destination_logs, datetime)
        # We may have collected the same logs before, clean up old versions.
        if os.path.exists(destination_results_datetime_zip):
            os.remove(destination_results_datetime_zip)
        if os.path.exists(destination_results_datetime):
            shutil.rmtree(destination_results_datetime)
        if os.path.exists(destination_logs_datetime):
            shutil.rmtree(destination_logs_datetime)
        shutil.copytree(
            os.path.join(repository_results, datetime),
            destination_results_datetime)
        # Copying the zip file has to happen after the tree so the destination
        # directory is available.
        shutil.copy(
            os.path.join(repository_results, datetime) + '.zip',
            destination_results_datetime_zip)
        shutil.copytree(
            os.path.join(repository_logs, datetime),
            destination_logs_datetime)

    def _get_expected_failures(self, directory):
        """Returns a set of expected failures.

        @param directory: Name of the directory under self.bindir that holds
                          the expected failure files.
        @return: a set of expected failures.
        """
        logging.info('Loading expected failures from %s.', directory)
        expected_fail_dir = os.path.join(self.bindir, directory)
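        # Expected failure files are suffixed with the ABI they apply to,
        # e.g. '<name>.arm', and list test names one per line.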
        expected_fail_files = glob.glob(expected_fail_dir + '/*.' + self._abi)
        expected_failures = set()
        for expected_fail_file in expected_fail_files:
            try:
                file_path = os.path.join(expected_fail_dir, expected_fail_file)
                with open(file_path) as f:
                    lines = set(f.read().splitlines())
                    logging.info('Loaded %d expected failures from %s',
                                 len(lines), expected_fail_file)
                    expected_failures |= lines
            except IOError as e:
                logging.error('Error loading %s (%s).', file_path, e.strerror)
        logging.info('Finished loading expected failures: %s',
                     expected_failures)
        return expected_failures