blob: 5bef29de9631da35ab552b59eb26e255b675b382 [file] [log] [blame]
Ilja H. Friedelbee84a72016-09-28 15:57:06 -07001# Copyright 2016 The Chromium OS Authors. All rights reserved.
2# Use of this source code is governed by a BSD-style license that can be
3# found in the LICENSE file.
4
5# repohooks/pre-upload.py currently does not run pylint. But for developers who
6# want to check their code manually we disable several harmless pylint warnings
7# which just distract from more serious remaining issues.
8#
9# The instance variables _host and _install_paths are not defined in __init__().
10# pylint: disable=attribute-defined-outside-init
11#
12# Many short variable names don't follow the naming convention.
13# pylint: disable=invalid-name
14#
15# _parse_result() and _dir_size() don't access self and could be functions.
16# pylint: disable=no-self-use
17#
18# _ChromeLogin and _TradefedLogCollector have no public methods.
19# pylint: disable=too-few-public-methods
20
21import contextlib
22import errno
David Haddockb9a362b2016-10-28 16:19:12 -070023import glob
Ilja H. Friedelbee84a72016-09-28 15:57:06 -070024import hashlib
Ilja H. Friedel46863772017-01-25 00:53:44 -080025import lockfile
Ilja H. Friedelbee84a72016-09-28 15:57:06 -070026import logging
27import os
28import pipes
29import random
30import re
31import shutil
32import stat
33import tempfile
34import urlparse
35
36from autotest_lib.client.bin import utils as client_utils
Luis Hector Chavez554c6f82017-01-27 14:21:40 -080037from autotest_lib.client.common_lib import base_utils
Ilja H. Friedelbee84a72016-09-28 15:57:06 -070038from autotest_lib.client.common_lib import error
39from autotest_lib.client.common_lib.cros import dev_server
40from autotest_lib.server import afe_utils
41from autotest_lib.server import autotest
42from autotest_lib.server import test
43from autotest_lib.server import utils
44from autotest_lib.site_utils import lxc
45
Ilja H. Friedelbee84a72016-09-28 15:57:06 -070046
# Google Storage locations of the pinned SDK tools and adb binaries that are
# installed on the server/drone (see initialize()) before invoking tradefed.
_SDK_TOOLS_DIR = ('gs://chromeos-arc-images/builds/'
                  'git_mnc-dr-arc-dev-linux-static_sdk_tools/3554341')
_SDK_TOOLS_FILES = ['aapt']
# To stabilize adb behavior, we use dynamically linked adb.
_ADB_DIR = ('gs://chromeos-arc-images/builds/'
            'git_mnc-dr-arc-dev-linux-cheets_arm-user/3554341')
# TODO(ihf): Make this the path below as it seems to work locally.
# 'git_mnc-dr-arc-dev-linux-static_sdk_tools/3554341')
_ADB_FILES = ['adb']

# Polling interval and timeout (seconds) while waiting for 'adb connect' to
# succeed in _connect_adb().
_ADB_POLLING_INTERVAL_SECONDS = 1
_ADB_READY_TIMEOUT_SECONDS = 60
# File inside the Android container from which adbd reads trusted public keys.
_ANDROID_ADB_KEYS_PATH = '/data/misc/adb/adb_keys'

# Polling interval and timeout (seconds) while waiting for the ARC intent
# helper process in _wait_for_arc_boot().
_ARC_POLLING_INTERVAL_SECONDS = 1
_ARC_READY_TIMEOUT_SECONDS = 60

# Prefix of the per-job temporary install directory (tempfile.mkdtemp).
_TRADEFED_PREFIX = 'autotest-tradefed-install_'
# Cache/lock roots: the local paths are used when running outside a
# container, the container paths when running inside lab lxc instances.
_TRADEFED_CACHE_LOCAL = '/tmp/autotest-tradefed-cache'
_TRADEFED_CACHE_CONTAINER = '/usr/local/autotest/results/shared/cache'
_TRADEFED_CACHE_CONTAINER_LOCK = '/usr/local/autotest/results/shared/lock'

# According to dshi a drone has 500GB of disk space. It is ok for now to use
# 10GB of disk space, as no more than 10 tests should run in parallel.
# TODO(ihf): Investigate tighter cache size.
_TRADEFED_CACHE_MAX_SIZE = (10 * 1024 * 1024 * 1024)
73
74
75class _ChromeLogin(object):
76 """Context manager to handle Chrome login state."""
77
78 def __init__(self, host):
79 self._host = host
80
81 def __enter__(self):
82 """Logs in to the Chrome."""
83 logging.info('Ensure Android is running...')
84 autotest.Autotest(self._host).run_test('cheets_CTSHelper',
85 check_client_result=True)
86
87 def __exit__(self, exc_type, exc_value, traceback):
88 """On exit, to wipe out all the login state, reboot the machine.
89
90 @param exc_type: Exception type if an exception is raised from the
91 with-block.
92 @param exc_value: Exception instance if an exception is raised from
93 the with-block.
94 @param traceback: Stack trace info if an exception is raised from
95 the with-block.
96 @return None, indicating not to ignore an exception from the with-block
97 if raised.
98 """
99 logging.info('Rebooting...')
100 try:
101 self._host.reboot()
102 except Exception:
103 if exc_type is None:
104 raise
105 # If an exception is raise from the with-block, just record the
106 # exception for the rebooting to avoid ignoring the original
107 # exception.
108 logging.exception('Rebooting failed.')
109
110
@contextlib.contextmanager
def lock(filename):
    """Prevents other autotest/tradefed instances from accessing cache.

    @param filename: Path of the lock file guarding the shared cache.
    @raise error.TestFail: If the lock cannot be acquired after ~1000
            attempts and has to be forcefully broken.
    """
    filelock = lockfile.FileLock(filename)
    # It is tempting just to call filelock.acquire(3600). But the implementation
    # has very poor temporal granularity (timeout/10), which is unsuitable for
    # our needs. See /usr/lib64/python2.7/site-packages/lockfile/
    attempts = 0
    while not filelock.i_am_locking():
        try:
            attempts += 1
            logging.info('Waiting for cache lock...')
            # Short randomized timeout so parallel waiters do not retry in
            # lockstep.
            filelock.acquire(random.randint(1, 5))
        except (lockfile.AlreadyLocked, lockfile.LockTimeout):
            if attempts > 1000:
                # Normally we should acquire the lock in a few seconds. Once we
                # wait on the order of hours either the dev server IO is
                # overloaded or a lock didn't get cleaned up. Take one for the
                # team, break the lock and report a failure. This should fix
                # the lock for following tests. If the failure affects more than
                # one job look for a deadlock or dev server overload.
                logging.error('Permanent lock failure. Trying to break lock.')
                filelock.break_lock()
                raise error.TestFail('Error: permanent cache lock failure.')
        else:
            # acquire() returned without raising, i.e. we now hold the lock.
            logging.info('Acquired cache lock after %d attempts.', attempts)
    try:
        yield
    finally:
        filelock.release()
        logging.info('Released cache lock.')
142
143
@contextlib.contextmanager
def adb_keepalive(target, extra_paths):
    """A context manager that keeps the adb connection alive.

    Spawns a background process which continuously polls the adb connected
    state and reconnects whenever the connection drops. This is currently
    the only way to safely survive (intentional) DUT reboots.

    @param target: the hostname and port of the DUT.
    @param extra_paths: any additional components to the PATH environment
                        variable.
    """
    from autotest_lib.client.common_lib.cros import adb_keepalive as module
    # |__file__| points at the compiled .pyc of the module; strip the
    # compilation suffix so the original .py script is executed.
    script_path = module.__file__.replace('.pyc', '.py')
    keepalive_job = base_utils.BgJob([script_path, target],
                                     nickname='adb_keepalive',
                                     stderr_level=logging.DEBUG,
                                     stdout_tee=base_utils.TEE_TO_LOGS,
                                     stderr_tee=base_utils.TEE_TO_LOGS,
                                     extra_paths=extra_paths)
    try:
        yield
    finally:
        # The adb_keepalive.py script runs forever until SIGTERM is sent.
        base_utils.nuke_subprocess(keepalive_job.sp)
        base_utils.join_bg_jobs([keepalive_job])
174
175
class TradefedTest(test.test):
    """Base class to prepare DUT to run tests via tradefed.

    Subclasses use the helpers below to install the tradefed bundle and SDK
    tools, bring up adb/ARC on the DUT, and parse/collect tradefed results.
    """
    version = 1
179
    def initialize(self, host=None):
        """Sets up the tools and binary bundles for the test.

        @param host: DUT host object; despite the None default it is
                required (its hostname is logged and adb connects to it).
        """
        logging.info('Hostname: %s', host.hostname)
        self._host = host
        self._install_paths = []
        # Tests in the lab run within individual lxc container instances.
        if utils.is_in_container():
            cache_root = _TRADEFED_CACHE_CONTAINER
        else:
            cache_root = _TRADEFED_CACHE_LOCAL
        # Quick sanity check and spew of java version installed on the server.
        utils.run('java', args=('-version',), ignore_status=False, verbose=True,
                  stdout_tee=utils.TEE_TO_LOGS, stderr_tee=utils.TEE_TO_LOGS)
        # The content of the cache survives across jobs.
        self._safe_makedirs(cache_root)
        self._tradefed_cache = os.path.join(cache_root, 'cache')
        self._tradefed_cache_lock = os.path.join(cache_root, 'lock')
        # The content of the install location does not survive across jobs and
        # is isolated (by using a unique path) against other autotest
        # instances. This is not needed for the lab, but if somebody wants to
        # run multiple TradefedTest instances.
        self._tradefed_install = tempfile.mkdtemp(prefix=_TRADEFED_PREFIX)
        # Under lxc the cache is shared between multiple autotest/tradefed
        # instances. We need to synchronize access to it. All binaries are
        # installed through the (shared) cache into the local (unshared)
        # lxc/autotest instance storage.
        # If clearing the cache it must happen before all downloads.
        self._clear_download_cache_if_needed()
        # Set permissions (rwxr-xr-x) to the executable binaries.
        permission = (stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH
                      | stat.S_IXOTH)
        self._install_files(_ADB_DIR, _ADB_FILES, permission)
        self._install_files(_SDK_TOOLS_DIR, _SDK_TOOLS_FILES, permission)
213
214 def cleanup(self):
215 """Cleans up any dirtied state."""
216 # Kill any lingering adb servers.
217 self._run('adb', verbose=True, args=('kill-server',))
218 logging.info('Cleaning up %s.', self._tradefed_install)
219 shutil.rmtree(self._tradefed_install)
220
221 def _login_chrome(self):
222 """Returns Chrome log-in context manager.
223
224 Please see also cheets_CTSHelper for details about how this works.
225 """
226 return _ChromeLogin(self._host)
227
Luis Hector Chavez554c6f82017-01-27 14:21:40 -0800228 def _get_adb_target(self):
229 return '{}:{}'.format(self._host.hostname, self._host.port)
230
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700231 def _try_adb_connect(self):
232 """Attempts to connect to adb on the DUT.
233
234 @return boolean indicating if adb connected successfully.
235 """
236 # This may fail return failure due to a race condition in adb connect
237 # (b/29370989). If adb is already connected, this command will
238 # immediately return success.
Luis Hector Chavez554c6f82017-01-27 14:21:40 -0800239 hostport = self._get_adb_target()
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700240 result = self._run(
241 'adb',
242 args=('connect', hostport),
243 verbose=True,
244 ignore_status=True)
245 logging.info('adb connect {}:\n{}'.format(hostport, result.stdout))
246 if result.exit_status != 0:
247 return False
248
249 result = self._run('adb', args=('devices',))
250 logging.info('adb devices:\n' + result.stdout)
251 if not re.search(
252 r'{}\s+(device|unauthorized)'.format(re.escape(hostport)),
253 result.stdout):
254 return False
255
256 # Actually test the connection with an adb command as there can be
257 # a race between detecting the connected device and actually being
258 # able to run a commmand with authenticated adb.
259 result = self._run('adb', args=('shell', 'exit'), ignore_status=True)
260 return result.exit_status == 0
261
262 def _android_shell(self, command):
263 """Run a command remotely on the device in an android shell
264
265 This function is strictly for internal use only, as commands do not run
266 in a fully consistent Android environment. Prefer adb shell instead.
267 """
268 self._host.run('android-sh -c ' + pipes.quote(command))
269
270 def _write_android_file(self, filename, data):
271 """Writes a file to a location relative to the android container.
272
273 This is an internal function used to bootstrap adb.
274 Tests should use adb push to write files.
275 """
276 android_cmd = 'echo %s > %s' % (pipes.quote(data),
277 pipes.quote(filename))
278 self._android_shell(android_cmd)
279
280 def _connect_adb(self):
281 """Sets up ADB connection to the ARC container."""
282 logging.info('Setting up adb connection.')
283 # Generate and push keys for adb.
284 # TODO(elijahtaylor): Extract this code to arc_common and de-duplicate
285 # code in arc.py on the client side tests.
286 key_path = os.path.join(self.tmpdir, 'test_key')
287 pubkey_path = key_path + '.pub'
288 self._run('adb', verbose=True, args=('keygen', pipes.quote(key_path)))
289 with open(pubkey_path, 'r') as f:
290 self._write_android_file(_ANDROID_ADB_KEYS_PATH, f.read())
291 self._android_shell('restorecon ' + pipes.quote(_ANDROID_ADB_KEYS_PATH))
292 os.environ['ADB_VENDOR_KEYS'] = key_path
293
294 # Kill existing adb server to ensure that the env var is picked up.
295 self._run('adb', verbose=True, args=('kill-server',))
296
297 # This starts adbd.
298 self._android_shell('setprop sys.usb.config mtp,adb')
299
Luis Hector Chavez554c6f82017-01-27 14:21:40 -0800300 # Also let it be automatically started upon reboot.
301 self._android_shell('setprop persist.sys.usb.config mtp,adb')
302
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700303 # adbd may take some time to come up. Repeatedly try to connect to adb.
304 utils.poll_for_condition(lambda: self._try_adb_connect(),
Ilja H. Friedel6d5ca8f2016-10-26 22:35:36 -0700305 exception=error.TestFail(
306 'Error: Failed to set up adb connection'),
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700307 timeout=_ADB_READY_TIMEOUT_SECONDS,
308 sleep_interval=_ADB_POLLING_INTERVAL_SECONDS)
309
310 logging.info('Successfully setup adb connection.')
311
312 def _wait_for_arc_boot(self):
313 """Wait until ARC is fully booted.
314
315 Tests for the presence of the intent helper app to determine whether ARC
316 has finished booting.
317 """
318 def intent_helper_running():
Kazuhiro Inabaf2c47052017-01-26 09:18:51 +0900319 result = self._run('adb', args=('shell', 'pgrep', '-f',
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700320 'org.chromium.arc.intent_helper'))
321 return bool(result.stdout)
322 utils.poll_for_condition(
323 intent_helper_running,
Ilja H. Friedel6d5ca8f2016-10-26 22:35:36 -0700324 exception=error.TestFail(
325 'Error: Timed out waiting for intent helper.'),
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700326 timeout=_ARC_READY_TIMEOUT_SECONDS,
327 sleep_interval=_ARC_POLLING_INTERVAL_SECONDS)
328
329 def _disable_adb_install_dialog(self):
330 """Disables a dialog shown on adb install execution.
331
332 By default, on adb install execution, "Allow Google to regularly check
333 device activity ... " dialog is shown. It requires manual user action
334 so that tests are blocked at the point.
335 This method disables it.
336 """
337 logging.info('Disabling the adb install dialog.')
338 result = self._run(
339 'adb',
340 verbose=True,
341 args=(
342 'shell',
343 'settings',
344 'put',
345 'global',
346 'verifier_verify_adb_installs',
347 '0'))
348 logging.info('Disable adb dialog: %s', result.stdout)
349
    def _ready_arc(self):
        """Ready ARC and adb for running tests via tradefed.

        Order matters: adb must be connected before waiting for ARC boot,
        because the intent helper is polled through an adb shell command.
        """
        self._connect_adb()
        self._disable_adb_install_dialog()
        self._wait_for_arc_boot()
355
356 def _safe_makedirs(self, path):
357 """Creates a directory at |path| and its ancestors.
358
359 Unlike os.makedirs(), ignore errors even if directories exist.
360 """
361 try:
362 os.makedirs(path)
363 except OSError as e:
364 if not (e.errno == errno.EEXIST and os.path.isdir(path)):
365 raise
366
367 def _unzip(self, filename):
368 """Unzip the file.
369
370 The destination directory name will be the stem of filename.
371 E.g., _unzip('foo/bar/baz.zip') will create directory at
372 'foo/bar/baz', and then will inflate zip's content under the directory.
373 If here is already a directory at the stem, that directory will be used.
374
375 @param filename: Path to the zip archive.
376 @return Path to the inflated directory.
377 """
378 destination = os.path.splitext(filename)[0]
379 if os.path.isdir(destination):
380 return destination
381 self._safe_makedirs(destination)
382 utils.run('unzip', args=('-d', destination, filename))
383 return destination
384
385 def _dir_size(self, directory):
386 """Compute recursive size in bytes of directory."""
387 size = 0
388 for root, _, files in os.walk(directory):
389 size += sum(os.path.getsize(os.path.join(root, name))
390 for name in files)
391 return size
392
393 def _clear_download_cache_if_needed(self):
394 """Invalidates cache to prevent it from growing too large."""
395 # If the cache is large enough to hold a working set, we can simply
396 # delete everything without thrashing.
397 # TODO(ihf): Investigate strategies like LRU.
398 with lock(self._tradefed_cache_lock):
399 size = self._dir_size(self._tradefed_cache)
400 if size > _TRADEFED_CACHE_MAX_SIZE:
401 logging.info('Current cache size=%d got too large. Clearing %s.'
402 , size, self._tradefed_cache)
403 shutil.rmtree(self._tradefed_cache)
404 self._safe_makedirs(self._tradefed_cache)
405 else:
406 logging.info('Current cache size=%d of %s.', size,
407 self._tradefed_cache)
408
409 def _download_to_cache(self, uri):
410 """Downloads the uri from the storage server.
411
412 It always checks the cache for available binaries first and skips
413 download if binaries are already in cache.
414
415 The caller of this function is responsible for holding the cache lock.
416
417 @param uri: The Google Storage or dl.google.com uri.
418 @return Path to the downloaded object, name.
419 """
420 # Split uri into 3 pieces for use by gsutil and also by wget.
421 parsed = urlparse.urlparse(uri)
422 filename = os.path.basename(parsed.path)
423 # We are hashing the uri instead of the binary. This is acceptable, as
424 # the uris are supposed to contain version information and an object is
425 # not supposed to be changed once created.
426 output_dir = os.path.join(self._tradefed_cache,
427 hashlib.md5(uri).hexdigest())
428 output = os.path.join(output_dir, filename)
429 # Check for existence of file.
430 if os.path.exists(output):
431 logging.info('Skipping download of %s, reusing %s.', uri, output)
432 return output
433 self._safe_makedirs(output_dir)
434
435 if parsed.scheme not in ['gs', 'http', 'https']:
Ilja H. Friedel6d5ca8f2016-10-26 22:35:36 -0700436 raise error.TestFail('Error: Unknown download scheme %s' %
437 parsed.scheme)
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700438 if parsed.scheme in ['http', 'https']:
439 logging.info('Using wget to download %s to %s.', uri, output_dir)
440 # We are downloading 1 file at a time, hence using -O over -P.
441 # We also limit the rate to 20MBytes/s
442 utils.run(
443 'wget',
444 args=(
445 '--report-speed=bits',
446 '--limit-rate=20M',
447 '-O',
448 output,
449 uri),
450 verbose=True)
451 return output
452
453 if not client_utils.is_moblab():
454 # If the machine can access to the storage server directly,
455 # defer to "gsutil" for downloading.
456 logging.info('Host %s not in lab. Downloading %s directly to %s.',
457 self._host.hostname, uri, output)
458 # b/17445576: gsutil rsync of individual files is not implemented.
459 utils.run('gsutil', args=('cp', uri, output), verbose=True)
460 return output
461
462 # We are in the moblab. Because the machine cannot access the storage
463 # server directly, use dev server to proxy.
464 logging.info('Host %s is in lab. Downloading %s by staging to %s.',
465 self._host.hostname, uri, output)
466
467 dirname = os.path.dirname(parsed.path)
468 archive_url = '%s://%s%s' % (parsed.scheme, parsed.netloc, dirname)
469
470 # First, request the devserver to download files into the lab network.
471 # TODO(ihf): Switch stage_artifacts to honor rsync. Then we don't have
472 # to shuffle files inside of tarballs.
473 build = afe_utils.get_build(self._host)
474 ds = dev_server.ImageServer.resolve(build)
475 ds.stage_artifacts(build, files=[filename], archive_url=archive_url)
476
477 # Then download files from the dev server.
478 # TODO(ihf): use rsync instead of wget. Are there 3 machines involved?
479 # Itself, dev_server plus DUT? Or is there just no rsync in moblab?
480 ds_src = '/'.join([ds.url(), 'static', dirname, filename])
481 logging.info('dev_server URL: %s', ds_src)
482 # Calls into DUT to pull uri from dev_server.
483 utils.run(
484 'wget',
485 args=(
486 '--report-speed=bits',
487 '--limit-rate=20M',
488 '-O',
Ilja H. Friedelb83646b2016-10-18 13:02:59 -0700489 output,
490 ds_src),
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700491 verbose=True)
492 return output
493
494 def _instance_copy(self, cache_path):
495 """Makes a copy of a file from the (shared) cache to a wholy owned
496 local instance. Also copies one level of cache directoy (MD5 named).
497 """
498 filename = os.path.basename(cache_path)
499 dirname = os.path.basename(os.path.dirname(cache_path))
500 instance_dir = os.path.join(self._tradefed_install, dirname)
501 # Make sure destination directory is named the same.
502 self._safe_makedirs(instance_dir)
503 instance_path = os.path.join(instance_dir, filename)
504 shutil.copyfile(cache_path, instance_path)
505 return instance_path
506
507 def _install_bundle(self, gs_uri):
508 """Downloads a zip file, installs it and returns the local path."""
509 if not gs_uri.endswith('.zip'):
Ilja H. Friedel6d5ca8f2016-10-26 22:35:36 -0700510 raise error.TestFail('Error: Not a .zip file %s.', gs_uri)
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700511 # Atomic write through of file.
512 with lock(self._tradefed_cache_lock):
513 cache_path = self._download_to_cache(gs_uri)
514 local = self._instance_copy(cache_path)
David Haddockb9a362b2016-10-28 16:19:12 -0700515
516 unzipped = self._unzip(local)
517 self._abi = 'x86' if 'x86-x86' in unzipped else 'arm'
518 return unzipped
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700519
520 def _install_files(self, gs_dir, files, permission):
521 """Installs binary tools."""
522 for filename in files:
523 gs_uri = os.path.join(gs_dir, filename)
524 # Atomic write through of file.
525 with lock(self._tradefed_cache_lock):
526 cache_path = self._download_to_cache(gs_uri)
527 local = self._instance_copy(cache_path)
528 os.chmod(local, permission)
529 # Keep track of PATH.
530 self._install_paths.append(os.path.dirname(local))
531
532 def _run(self, *args, **kwargs):
533 """Executes the given command line.
534
535 To support SDK tools, such as adb or aapt, this adds _install_paths
536 to the extra_paths. Before invoking this, ensure _install_files() has
537 been called.
538 """
539 kwargs['extra_paths'] = (
540 kwargs.get('extra_paths', []) + self._install_paths)
541 return utils.run(*args, **kwargs)
542
Kazuhiro Inabaeb6b6332017-01-28 01:10:16 +0900543 def _collect_tradefed_global_log(self, result, destination):
544 """Collects the tradefed global log.
545
546 @param result: The result object from utils.run.
547 @param destination: Autotest result directory (destination of logs).
548 """
549 match = re.search(r'Saved log to /tmp/(tradefed_global_log_.*\.txt)',
550 result.stdout)
551 if not match:
552 logging.error('no tradefed_global_log file is found')
553 return
554
555 name = match.group(1)
556 dest = os.path.join(destination, 'logs', 'tmp')
557 self._safe_makedirs(dest)
558 shutil.copy(os.path.join('/tmp', name), os.path.join(dest, name))
559
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700560 def _parse_tradefed_datetime(self, result, summary=None):
561 """Get the tradefed provided result ID consisting of a datetime stamp.
562
563 Unfortunately we are unable to tell tradefed where to store the results.
564 In the lab we have multiple instances of tradefed running in parallel
565 writing results and logs to the same base directory. This function
566 finds the identifier which tradefed used during the current run and
567 returns it for further processing of result files.
568
569 @param result: The result object from utils.run.
570 @param summary: Test result summary from runs so far.
571 @return datetime_id: The result ID chosen by tradefed.
572 Example: '2016.07.14_00.34.50'.
573 """
574 # This string is show for both 'run' and 'continue' after all tests.
575 match = re.search(r': XML test result file generated at (\S+). Passed',
576 result.stdout)
577 if not (match and match.group(1)):
578 # TODO(ihf): Find out if we ever recover something interesting in
579 # this case. Otherwise delete it.
580 # Try harder to find the remains. This string shows before all
581 # tests but only with 'run', not 'continue'.
582 logging.warning('XML test result file incomplete?')
583 match = re.search(r': Created result dir (\S+)', result.stdout)
584 if not (match and match.group(1)):
585 error_msg = 'Test did not complete due to Chrome or ARC crash.'
586 if summary:
587 error_msg += (' Test summary from previous runs: %s'
588 % summary)
Ilja H. Friedel6d5ca8f2016-10-26 22:35:36 -0700589 raise error.TestFail(error_msg)
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700590 datetime_id = match.group(1)
591 logging.info('Tradefed identified results and logs with %s.',
592 datetime_id)
593 return datetime_id
594
Rohit Makasana99116d32016-10-17 19:32:04 -0700595 def _parse_result(self, result, waivers=None):
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700596 """Check the result from the tradefed output.
597
598 This extracts the test pass/fail/executed list from the output of
599 tradefed. It is up to the caller to handle inconsistencies.
600
601 @param result: The result object from utils.run.
Rohit Makasana99116d32016-10-17 19:32:04 -0700602 @param waivers: a set() of tests which are permitted to fail.
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700603 """
604 # Parse the stdout to extract test status. In particular step over
605 # similar output for each ABI and just look at the final summary.
606 match = re.search(r'(XML test result file generated at (\S+). '
607 r'Passed (\d+), Failed (\d+), Not Executed (\d+))',
608 result.stdout)
609 if not match:
610 raise error.Test('Test log does not contain a summary.')
611
612 passed = int(match.group(3))
613 failed = int(match.group(4))
614 not_executed = int(match.group(5))
615 match = re.search(r'(Start test run of (\d+) packages, containing '
616 r'(\d+(?:,\d+)?) tests)', result.stdout)
617 if match and match.group(3):
618 tests = int(match.group(3).replace(',', ''))
619 else:
620 # Unfortunately this happens. Assume it made no other mistakes.
621 logging.warning('Tradefed forgot to print number of tests.')
622 tests = passed + failed + not_executed
Rohit Makasana99116d32016-10-17 19:32:04 -0700623 # TODO(rohitbm): make failure parsing more robust by extracting the list
624 # of failing tests instead of searching in the result blob. As well as
625 # only parse for waivers for the running ABI.
626 if waivers:
627 for testname in waivers:
David Haddock16712332016-11-03 14:35:23 -0700628 # TODO(dhaddock): Find a more robust way to apply waivers.
629 fail_count = result.stdout.count(testname + ' FAIL')
630 if fail_count:
631 if fail_count > 2:
632 raise error.TestFail('Error: There are too many '
633 'failures found in the output to '
634 'be valid for applying waivers. '
635 'Please check output.')
636 failed -= fail_count
Rohit Makasana99116d32016-10-17 19:32:04 -0700637 # To maintain total count consistency.
David Haddock16712332016-11-03 14:35:23 -0700638 passed += fail_count
639 logging.info('Waived failure for %s %d time(s)',
640 testname, fail_count)
Rohit Makasana99116d32016-10-17 19:32:04 -0700641 logging.info('tests=%d, passed=%d, failed=%d, not_executed=%d',
642 tests, passed, failed, not_executed)
David Haddock16712332016-11-03 14:35:23 -0700643 if failed < 0:
644 raise error.TestFail('Error: Internal waiver book keeping has '
645 'become inconsistent.')
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700646 return (tests, passed, failed, not_executed)
647
648 def _collect_logs(self, repository, datetime, destination):
649 """Collects the tradefed logs.
650
651 It is legal to collect the same logs multiple times. This is normal
652 after 'tradefed continue' updates existing logs with new results.
653
654 @param repository: Full path to tradefeds output on disk.
655 @param datetime: The identifier which tradefed assigned to the run.
656 Currently this looks like '2016.07.14_00.34.50'.
657 @param destination: Autotest result directory (destination of logs).
658 """
659 logging.info('Collecting tradefed testResult.xml and logs to %s.',
660 destination)
661 repository_results = os.path.join(repository, 'results')
662 repository_logs = os.path.join(repository, 'logs')
663 # Because other tools rely on the currently chosen Google storage paths
664 # we need to keep destination_results in
665 # cheets_CTS.*/results/android-cts/2016.mm.dd_hh.mm.ss(/|.zip)
666 # and destination_logs in
667 # cheets_CTS.*/results/android-cts/logs/2016.mm.dd_hh.mm.ss/
668 destination_results = destination
Ilja H. Friedelb83646b2016-10-18 13:02:59 -0700669 destination_results_datetime = os.path.join(destination_results,
670 datetime)
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700671 destination_results_datetime_zip = destination_results_datetime + '.zip'
672 destination_logs = os.path.join(destination, 'logs')
673 destination_logs_datetime = os.path.join(destination_logs, datetime)
674 # We may have collected the same logs before, clean old versions.
675 if os.path.exists(destination_results_datetime_zip):
676 os.remove(destination_results_datetime_zip)
677 if os.path.exists(destination_results_datetime):
678 shutil.rmtree(destination_results_datetime)
679 if os.path.exists(destination_logs_datetime):
680 shutil.rmtree(destination_logs_datetime)
681 shutil.copytree(
682 os.path.join(repository_results, datetime),
683 destination_results_datetime)
684 # Copying the zip file has to happen after the tree so the destination
685 # directory is available.
686 shutil.copy(
687 os.path.join(repository_results, datetime) + '.zip',
688 destination_results_datetime_zip)
689 shutil.copytree(
690 os.path.join(repository_logs, datetime),
691 destination_logs_datetime)
David Haddockb9a362b2016-10-28 16:19:12 -0700692
Rohit Makasana77566902016-11-01 15:34:27 -0700693 def _get_expected_failures(self, directory):
694 """Return a list of expected failures.
David Haddockb9a362b2016-10-28 16:19:12 -0700695
Rohit Makasana77566902016-11-01 15:34:27 -0700696 @return: a list of expected failures.
David Haddockb9a362b2016-10-28 16:19:12 -0700697 """
Rohit Makasana77566902016-11-01 15:34:27 -0700698 logging.info('Loading expected failures from %s.', directory)
699 expected_fail_dir = os.path.join(self.bindir, directory)
David Haddockb9a362b2016-10-28 16:19:12 -0700700 expected_fail_files = glob.glob(expected_fail_dir + '/*.' + self._abi)
Rohit Makasana77566902016-11-01 15:34:27 -0700701 expected_failures = set()
David Haddockb9a362b2016-10-28 16:19:12 -0700702 for expected_fail_file in expected_fail_files:
703 try:
704 file_path = os.path.join(expected_fail_dir, expected_fail_file)
705 with open(file_path) as f:
706 lines = set(f.read().splitlines())
707 logging.info('Loaded %d expected failures from %s',
708 len(lines), expected_fail_file)
Rohit Makasana77566902016-11-01 15:34:27 -0700709 expected_failures |= lines
David Haddockb9a362b2016-10-28 16:19:12 -0700710 except IOError as e:
711 logging.error('Error loading %s (%s).', file_path, e.strerror)
Rohit Makasana77566902016-11-01 15:34:27 -0700712 logging.info('Finished loading expected failures: %s', expected_failures)
713 return expected_failures