# Copyright 2016 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# repohooks/pre-upload.py currently does not run pylint. But for developers who
# want to check their code manually we disable several harmless pylint warnings
# which just distract from more serious remaining issues.
#
# The instance variables _host and _install_paths are not defined in __init__().
# pylint: disable=attribute-defined-outside-init
#
# Many short variable names don't follow the naming convention.
# pylint: disable=invalid-name
#
# _parse_result() and _dir_size() don't access self and could be functions.
# pylint: disable=no-self-use
#
# _ChromeLogin and _TradefedLogCollector have no public methods.
# pylint: disable=too-few-public-methods

import contextlib
import errno
import glob
import hashlib
import lockfile
import logging
import os
import pipes
import random
import re
import shutil
import stat
import tempfile
import urlparse

from autotest_lib.client.bin import utils as client_utils
from autotest_lib.client.common_lib import base_utils
from autotest_lib.client.common_lib import error
from autotest_lib.client.common_lib.cros import dev_server
from autotest_lib.server import autotest
from autotest_lib.server import test
from autotest_lib.server import utils
from autotest_lib.site_utils import lxc


_SDK_TOOLS_DIR = ('gs://chromeos-arc-images/builds/'
                  'git_mnc-dr-arc-dev-linux-static_sdk_tools/3554341')
_SDK_TOOLS_FILES = ['aapt']
# To stabilize adb behavior, we use dynamically linked adb.
_ADB_DIR = ('gs://chromeos-arc-images/builds/'
            'git_mnc-dr-arc-dev-linux-cheets_arm-user/3554341')
# TODO(ihf): Make this the path below as it seems to work locally.
# 'git_mnc-dr-arc-dev-linux-static_sdk_tools/3554341')
_ADB_FILES = ['adb']

_ADB_POLLING_INTERVAL_SECONDS = 1
_ADB_READY_TIMEOUT_SECONDS = 60
_ANDROID_ADB_KEYS_PATH = '/data/misc/adb/adb_keys'

_ARC_POLLING_INTERVAL_SECONDS = 1
_ARC_READY_TIMEOUT_SECONDS = 60

_TRADEFED_PREFIX = 'autotest-tradefed-install_'
_TRADEFED_CACHE_LOCAL = '/tmp/autotest-tradefed-cache'
_TRADEFED_CACHE_CONTAINER = '/usr/local/autotest/results/shared/cache'
_TRADEFED_CACHE_CONTAINER_LOCK = '/usr/local/autotest/results/shared/lock'

# According to dshi a drone has 500GB of disk space. It is ok for now to use
# 10GB of disk space, as no more than 10 tests should run in parallel.
# TODO(ihf): Investigate tighter cache size.
_TRADEFED_CACHE_MAX_SIZE = (10 * 1024 * 1024 * 1024)


class _ChromeLogin(object):
    """Context manager to handle Chrome login state."""

    def __init__(self, host):
        self._host = host

    def __enter__(self):
        """Logs into Chrome."""
        logging.info('Ensure Android is running...')
        autotest.Autotest(self._host).run_test('cheets_CTSHelper',
                                               check_client_result=True)

    def __exit__(self, exc_type, exc_value, traceback):
        """On exit, reboots the machine to wipe out all the login state.

        @param exc_type: Exception type if an exception is raised from the
                         with-block.
        @param exc_value: Exception instance if an exception is raised from
                          the with-block.
        @param traceback: Stack trace info if an exception is raised from
                          the with-block.
        @return None, indicating not to ignore an exception from the with-block
                if raised.
        """
        logging.info('Rebooting...')
        try:
            self._host.reboot()
        except Exception:
            if exc_type is None:
                raise
            # If an exception is raised from the with-block, just log the
            # reboot failure to avoid masking the original exception.
            logging.exception('Rebooting failed.')


@contextlib.contextmanager
def lock(filename):
    """Prevents other autotest/tradefed instances from accessing cache."""
    filelock = lockfile.FileLock(filename)
    # It is tempting just to call filelock.acquire(3600). But the implementation
    # has very poor temporal granularity (timeout/10), which is unsuitable for
    # our needs. See /usr/lib64/python2.7/site-packages/lockfile/
    attempts = 0
    while not filelock.i_am_locking():
        try:
            attempts += 1
            logging.info('Waiting for cache lock...')
            filelock.acquire(random.randint(1, 5))
        except (lockfile.AlreadyLocked, lockfile.LockTimeout):
            if attempts > 1000:
                # Normally we should acquire the lock in a few seconds. Once we
                # wait on the order of hours either the dev server IO is
                # overloaded or a lock didn't get cleaned up. Take one for the
                # team, break the lock and report a failure. This should fix
                # the lock for following tests. If the failure affects more
                # than one job, look for a deadlock or dev server overload.
                logging.error('Permanent lock failure. Trying to break lock.')
                filelock.break_lock()
                raise error.TestFail('Error: permanent cache lock failure.')
        else:
            logging.info('Acquired cache lock after %d attempts.', attempts)
    try:
        yield
    finally:
        filelock.release()
        logging.info('Released cache lock.')


@contextlib.contextmanager
def adb_keepalive(target, extra_paths):
    """A context manager that keeps the adb connection alive.

    adb_keepalive will spin off a new process that will continuously poll for
    adb's connected state, and will attempt to reconnect if it ever goes down.
    This is the only way we can currently recover safely from (intentional)
    reboots.

    @param target: the hostname and port of the DUT.
    @param extra_paths: any additional components to the PATH environment
                        variable.
    """
    from autotest_lib.client.common_lib.cros import adb_keepalive as module
    # |__file__| returns the absolute path of the compiled bytecode of the
    # module. We want to run the original .py file, so we need to change the
    # extension back.
    script_filename = module.__file__.replace('.pyc', '.py')
    job = base_utils.BgJob([script_filename, target],
                           nickname='adb_keepalive', stderr_level=logging.DEBUG,
                           stdout_tee=base_utils.TEE_TO_LOGS,
                           stderr_tee=base_utils.TEE_TO_LOGS,
                           extra_paths=extra_paths)

    try:
        yield
    finally:
        # The adb_keepalive.py script runs forever until SIGTERM is sent.
        base_utils.nuke_subprocess(job.sp)
        base_utils.join_bg_jobs([job])


class TradefedTest(test.test):
    """Base class to prepare DUT to run tests via tradefed."""
    version = 1

    def initialize(self, host=None):
        """Sets up the tools and binary bundles for the test."""
        logging.info('Hostname: %s', host.hostname)
        self._host = host
        self._install_paths = []
        # Tests in the lab run within individual lxc container instances.
        if utils.is_in_container():
            cache_root = _TRADEFED_CACHE_CONTAINER
        else:
            cache_root = _TRADEFED_CACHE_LOCAL
        # Quick sanity check and spew of java version installed on the server.
        utils.run('java', args=('-version',), ignore_status=False, verbose=True,
                  stdout_tee=utils.TEE_TO_LOGS, stderr_tee=utils.TEE_TO_LOGS)
        # The content of the cache survives across jobs.
        self._safe_makedirs(cache_root)
        self._tradefed_cache = os.path.join(cache_root, 'cache')
        self._tradefed_cache_lock = os.path.join(cache_root, 'lock')
        # The content of the install location does not survive across jobs and
        # is isolated (by using a unique path) against other autotest instances.
        # This is not needed for the lab, but is useful if somebody wants to
        # run multiple TradefedTest instances locally.
        self._tradefed_install = tempfile.mkdtemp(prefix=_TRADEFED_PREFIX)
        # Under lxc the cache is shared between multiple autotest/tradefed
        # instances. We need to synchronize access to it. All binaries are
        # installed through the (shared) cache into the local (unshared)
        # lxc/autotest instance storage.
        # If clearing the cache it must happen before all downloads.
        self._clear_download_cache_if_needed()
        # Set permissions (rwxr-xr-x) to the executable binaries.
        permission = (stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH
                      | stat.S_IXOTH)
        self._install_files(_ADB_DIR, _ADB_FILES, permission)
        self._install_files(_SDK_TOOLS_DIR, _SDK_TOOLS_FILES, permission)

    def cleanup(self):
        """Cleans up any dirtied state."""
        # Kill any lingering adb servers.
        self._run('adb', verbose=True, args=('kill-server',))
        logging.info('Cleaning up %s.', self._tradefed_install)
        shutil.rmtree(self._tradefed_install)

    def _login_chrome(self):
        """Returns a Chrome log-in context manager.

        Please see also cheets_CTSHelper for details about how this works.
        """
        return _ChromeLogin(self._host)

    def _get_adb_target(self):
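        """Returns the adb target string in the form '<hostname>:<port>'."""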
        return '{}:{}'.format(self._host.hostname, self._host.port)

    def _try_adb_connect(self):
        """Attempts to connect to adb on the DUT.

        @return boolean indicating if adb connected successfully.
        """
        # This may return failure due to a race condition in adb connect
        # (b/29370989). If adb is already connected, this command will
        # immediately return success.
        hostport = self._get_adb_target()
        result = self._run(
            'adb',
            args=('connect', hostport),
            verbose=True,
            ignore_status=True)
        logging.info('adb connect {}:\n{}'.format(hostport, result.stdout))
        if result.exit_status != 0:
            return False

        result = self._run('adb', args=('devices',))
        logging.info('adb devices:\n' + result.stdout)
        if not re.search(
                r'{}\s+(device|unauthorized)'.format(re.escape(hostport)),
                result.stdout):
            return False

        # Actually test the connection with an adb command as there can be
        # a race between detecting the connected device and actually being
        # able to run a command with authenticated adb.
        result = self._run('adb', args=('shell', 'exit'), ignore_status=True)
        return result.exit_status == 0

    def _android_shell(self, command):
        """Runs a command remotely on the device in an android shell.

        This function is strictly for internal use only, as commands do not run
        in a fully consistent Android environment. Prefer adb shell instead.
        """
        self._host.run('android-sh -c ' + pipes.quote(command))

    def _write_android_file(self, filename, data):
        """Writes a file to a location relative to the android container.

        This is an internal function used to bootstrap adb.
        Tests should use adb push to write files.
        """
        android_cmd = 'echo %s > %s' % (pipes.quote(data),
                                        pipes.quote(filename))
        self._android_shell(android_cmd)

    def _connect_adb(self):
        """Sets up ADB connection to the ARC container."""
        logging.info('Setting up adb connection.')
        # Generate and push keys for adb.
        # TODO(elijahtaylor): Extract this code to arc_common and de-duplicate
        # code in arc.py on the client side tests.
        key_path = os.path.join(self.tmpdir, 'test_key')
        pubkey_path = key_path + '.pub'
        self._run('adb', verbose=True, args=('keygen', pipes.quote(key_path)))
        with open(pubkey_path, 'r') as f:
            self._write_android_file(_ANDROID_ADB_KEYS_PATH, f.read())
        self._android_shell('restorecon ' + pipes.quote(_ANDROID_ADB_KEYS_PATH))
        os.environ['ADB_VENDOR_KEYS'] = key_path

        # Kill existing adb server to ensure that the env var is picked up.
        self._run('adb', verbose=True, args=('kill-server',))

        # This starts adbd.
        self._android_shell('setprop sys.usb.config mtp,adb')

        # Also let it be automatically started upon reboot.
        self._android_shell('setprop persist.sys.usb.config mtp,adb')

        # adbd may take some time to come up. Repeatedly try to connect to adb.
        utils.poll_for_condition(lambda: self._try_adb_connect(),
                                 exception=error.TestFail(
                                     'Error: Failed to set up adb connection'),
                                 timeout=_ADB_READY_TIMEOUT_SECONDS,
                                 sleep_interval=_ADB_POLLING_INTERVAL_SECONDS)

        logging.info('Successfully set up adb connection.')

    def _wait_for_arc_boot(self):
        """Wait until ARC is fully booted.

        Tests for the presence of the intent helper app to determine whether
        ARC has finished booting.
        """
        def intent_helper_running():
            result = self._run('adb', args=('shell', 'pgrep', '-f',
                                            'org.chromium.arc.intent_helper'))
            return bool(result.stdout)
        utils.poll_for_condition(
            intent_helper_running,
            exception=error.TestFail(
                'Error: Timed out waiting for intent helper.'),
            timeout=_ARC_READY_TIMEOUT_SECONDS,
            sleep_interval=_ARC_POLLING_INTERVAL_SECONDS)

    def _disable_adb_install_dialog(self):
        """Disables a dialog shown on adb install execution.

        By default, on adb install execution, the "Allow Google to regularly
        check device activity ... " dialog is shown. It requires manual user
        action, so tests would be blocked at that point. This method disables
        the dialog.
        """
        logging.info('Disabling the adb install dialog.')
        result = self._run(
            'adb',
            verbose=True,
            args=(
                'shell',
                'settings',
                'put',
                'global',
                'verifier_verify_adb_installs',
                '0'))
        logging.info('Disable adb dialog: %s', result.stdout)

    def _ready_arc(self):
        """Ready ARC and adb for running tests via tradefed."""
        self._connect_adb()
        self._disable_adb_install_dialog()
        self._wait_for_arc_boot()

    def _safe_makedirs(self, path):
        """Creates a directory at |path| and its ancestors.

        Unlike os.makedirs(), this ignores the error raised when the
        directories already exist.
        """
        try:
            os.makedirs(path)
        except OSError as e:
            if not (e.errno == errno.EEXIST and os.path.isdir(path)):
                raise

    def _unzip(self, filename):
        """Unzip the file.

        The destination directory name will be the stem of filename.
        E.g., _unzip('foo/bar/baz.zip') will create a directory at
        'foo/bar/baz', and then will inflate the zip's content under the
        directory. If there is already a directory at the stem, that directory
        will be used.

        @param filename: Path to the zip archive.
        @return Path to the inflated directory.
        """
        destination = os.path.splitext(filename)[0]
        if os.path.isdir(destination):
            return destination
        self._safe_makedirs(destination)
        utils.run('unzip', args=('-d', destination, filename))
        return destination

    def _dir_size(self, directory):
        """Compute recursive size in bytes of directory."""
        size = 0
        for root, _, files in os.walk(directory):
            size += sum(os.path.getsize(os.path.join(root, name))
                        for name in files)
        return size

    def _clear_download_cache_if_needed(self):
        """Invalidates cache to prevent it from growing too large."""
        # If the cache is large enough to hold a working set, we can simply
        # delete everything without thrashing.
        # TODO(ihf): Investigate strategies like LRU.
        with lock(self._tradefed_cache_lock):
            size = self._dir_size(self._tradefed_cache)
            if size > _TRADEFED_CACHE_MAX_SIZE:
                logging.info('Current cache size=%d got too large. Clearing %s.',
                             size, self._tradefed_cache)
                shutil.rmtree(self._tradefed_cache)
                self._safe_makedirs(self._tradefed_cache)
            else:
                logging.info('Current cache size=%d of %s.', size,
                             self._tradefed_cache)

    def _download_to_cache(self, uri):
        """Downloads the uri from the storage server.

        It always checks the cache for available binaries first and skips
        download if binaries are already in cache.

        The caller of this function is responsible for holding the cache lock.

        @param uri: The Google Storage or dl.google.com uri.
        @return Path to the downloaded object, name.
        """
        # Split uri into 3 pieces for use by gsutil and also by wget.
        parsed = urlparse.urlparse(uri)
        filename = os.path.basename(parsed.path)
        # We are hashing the uri instead of the binary. This is acceptable, as
        # the uris are supposed to contain version information and an object is
        # not supposed to be changed once created.
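        # The resulting cache layout is <tradefed_cache>/<md5 of uri>/<filename>.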
        output_dir = os.path.join(self._tradefed_cache,
                                  hashlib.md5(uri).hexdigest())
        output = os.path.join(output_dir, filename)
        # Check for existence of file.
        if os.path.exists(output):
            logging.info('Skipping download of %s, reusing %s.', uri, output)
            return output
        self._safe_makedirs(output_dir)

        if parsed.scheme not in ['gs', 'http', 'https']:
            raise error.TestFail('Error: Unknown download scheme %s' %
                                 parsed.scheme)
        if parsed.scheme in ['http', 'https']:
            logging.info('Using wget to download %s to %s.', uri, output_dir)
            # We are downloading 1 file at a time, hence using -O over -P.
            # We also limit the rate to 20MBytes/s.
            utils.run(
                'wget',
                args=(
                    '--report-speed=bits',
                    '--limit-rate=20M',
                    '-O',
                    output,
                    uri),
                verbose=True)
            return output

        if not client_utils.is_moblab():
            # If the machine can access the storage server directly,
            # defer to "gsutil" for downloading.
            logging.info('Host %s not in lab. Downloading %s directly to %s.',
                         self._host.hostname, uri, output)
            # b/17445576: gsutil rsync of individual files is not implemented.
            utils.run('gsutil', args=('cp', uri, output), verbose=True)
            return output

        # We are in the moblab. Because the machine cannot access the storage
        # server directly, use the dev server to proxy.
        logging.info('Host %s is in lab. Downloading %s by staging to %s.',
                     self._host.hostname, uri, output)

        dirname = os.path.dirname(parsed.path)
        archive_url = '%s://%s%s' % (parsed.scheme, parsed.netloc, dirname)

        # First, request the devserver to download files into the lab network.
        # TODO(ihf): Switch stage_artifacts to honor rsync. Then we don't have
        # to shuffle files inside of tarballs.
        info = self._host.host_info_store.get()
        ds = dev_server.ImageServer.resolve(info.build)
        ds.stage_artifacts(info.build, files=[filename],
                           archive_url=archive_url)

        # Then download files from the dev server.
        # TODO(ihf): use rsync instead of wget. Are there 3 machines involved?
        # Itself, dev_server plus DUT? Or is there just no rsync in moblab?
        ds_src = '/'.join([ds.url(), 'static', dirname, filename])
        logging.info('dev_server URL: %s', ds_src)
        # Calls into DUT to pull uri from dev_server.
        utils.run(
            'wget',
            args=(
                '--report-speed=bits',
                '--limit-rate=20M',
                '-O',
                output,
                ds_src),
            verbose=True)
        return output

    def _instance_copy(self, cache_path):
        """Makes a copy of a file from the (shared) cache to a wholly owned
        local instance. Also copies one level of cache directory (MD5 named).
        """
        filename = os.path.basename(cache_path)
        dirname = os.path.basename(os.path.dirname(cache_path))
        instance_dir = os.path.join(self._tradefed_install, dirname)
        # Make sure destination directory is named the same.
        self._safe_makedirs(instance_dir)
        instance_path = os.path.join(instance_dir, filename)
        shutil.copyfile(cache_path, instance_path)
        return instance_path

    def _install_bundle(self, gs_uri):
        """Downloads a zip file, installs it and returns the local path."""
        if not gs_uri.endswith('.zip'):
            raise error.TestFail('Error: Not a .zip file %s.' % gs_uri)
        # Atomic write through of file.
        with lock(self._tradefed_cache_lock):
            cache_path = self._download_to_cache(gs_uri)
            local = self._instance_copy(cache_path)

        unzipped = self._unzip(local)
        self._abi = 'x86' if 'x86-x86' in unzipped else 'arm'
        return unzipped

    def _install_files(self, gs_dir, files, permission):
        """Installs binary tools."""
        for filename in files:
            gs_uri = os.path.join(gs_dir, filename)
            # Atomic write through of file.
            with lock(self._tradefed_cache_lock):
                cache_path = self._download_to_cache(gs_uri)
                local = self._instance_copy(cache_path)
            os.chmod(local, permission)
            # Keep track of PATH.
            self._install_paths.append(os.path.dirname(local))

    def _run(self, *args, **kwargs):
        """Executes the given command line.

        To support SDK tools, such as adb or aapt, this adds _install_paths
        to the extra_paths. Before invoking this, ensure _install_files() has
        been called.
        """
        kwargs['extra_paths'] = (
                kwargs.get('extra_paths', []) + self._install_paths)
        return utils.run(*args, **kwargs)

    def _collect_tradefed_global_log(self, result, destination):
        """Collects the tradefed global log.

        @param result: The result object from utils.run.
        @param destination: Autotest result directory (destination of logs).
        """
        match = re.search(r'Saved log to /tmp/(tradefed_global_log_.*\.txt)',
                          result.stdout)
        if not match:
            logging.error('no tradefed_global_log file is found')
            return

        name = match.group(1)
        dest = os.path.join(destination, 'logs', 'tmp')
        self._safe_makedirs(dest)
        shutil.copy(os.path.join('/tmp', name), os.path.join(dest, name))

    def _parse_tradefed_datetime(self, result, summary=None):
        """Get the tradefed provided result ID consisting of a datetime stamp.

        Unfortunately we are unable to tell tradefed where to store the results.
        In the lab we have multiple instances of tradefed running in parallel
        writing results and logs to the same base directory. This function
        finds the identifier which tradefed used during the current run and
        returns it for further processing of result files.

        @param result: The result object from utils.run.
        @param summary: Test result summary from runs so far.
        @return datetime_id: The result ID chosen by tradefed.
                             Example: '2016.07.14_00.34.50'.
        """
        # This string is shown for both 'run' and 'continue' after all tests.
        match = re.search(r': XML test result file generated at (\S+). Passed',
                          result.stdout)
        if not (match and match.group(1)):
            # TODO(ihf): Find out if we ever recover something interesting in
            # this case. Otherwise delete it.
            # Try harder to find the remains. This string shows before all
            # tests but only with 'run', not 'continue'.
            logging.warning('XML test result file incomplete?')
            match = re.search(r': Created result dir (\S+)', result.stdout)
            if not (match and match.group(1)):
                error_msg = 'Test did not complete due to Chrome or ARC crash.'
                if summary:
                    error_msg += (' Test summary from previous runs: %s'
                                  % summary)
                raise error.TestFail(error_msg)
        datetime_id = match.group(1)
        logging.info('Tradefed identified results and logs with %s.',
                     datetime_id)
        return datetime_id

    def _parse_result(self, result, waivers=None):
        """Check the result from the tradefed output.

        This extracts the test pass/fail/executed list from the output of
        tradefed. It is up to the caller to handle inconsistencies.

        @param result: The result object from utils.run.
        @param waivers: a set() of tests which are permitted to fail.
        """
        # Parse the stdout to extract test status. In particular step over
        # similar output for each ABI and just look at the final summary.
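        # The summary line being matched looks roughly like this
        # (counts are an assumed example):
        #   XML test result file generated at 2016.07.14_00.34.50.
        #   Passed 900, Failed 9, Not Executed 9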
        match = re.search(r'(XML test result file generated at (\S+). '
                          r'Passed (\d+), Failed (\d+), Not Executed (\d+))',
                          result.stdout)
        if not match:
            raise error.TestFail('Test log does not contain a summary.')

        passed = int(match.group(3))
        failed = int(match.group(4))
        not_executed = int(match.group(5))
        match = re.search(r'(Start test run of (\d+) packages, containing '
                          r'(\d+(?:,\d+)?) tests)', result.stdout)
        if match and match.group(3):
            tests = int(match.group(3).replace(',', ''))
        else:
            # Unfortunately this happens. Assume it made no other mistakes.
            logging.warning('Tradefed forgot to print number of tests.')
            tests = passed + failed + not_executed
        # TODO(rohitbm): make failure parsing more robust by extracting the
        # list of failing tests instead of searching in the result blob.
        # Also, only parse waivers for the running ABI.
        if waivers:
            for testname in waivers:
                # TODO(dhaddock): Find a more robust way to apply waivers.
                fail_count = result.stdout.count(testname + ' FAIL')
                if fail_count:
                    if fail_count > 2:
                        raise error.TestFail('Error: There are too many '
                                             'failures found in the output to '
                                             'be valid for applying waivers. '
                                             'Please check output.')
                    failed -= fail_count
                    # To maintain total count consistency.
                    passed += fail_count
                    logging.info('Waived failure for %s %d time(s)',
                                 testname, fail_count)
        logging.info('tests=%d, passed=%d, failed=%d, not_executed=%d',
                     tests, passed, failed, not_executed)
        if failed < 0:
            raise error.TestFail('Error: Internal waiver bookkeeping has '
                                 'become inconsistent.')
        return (tests, passed, failed, not_executed)

    def _collect_logs(self, repository, datetime, destination):
        """Collects the tradefed logs.

        It is legal to collect the same logs multiple times. This is normal
        after 'tradefed continue' updates existing logs with new results.

        @param repository: Full path to tradefed's output on disk.
        @param datetime: The identifier which tradefed assigned to the run.
                         Currently this looks like '2016.07.14_00.34.50'.
        @param destination: Autotest result directory (destination of logs).
        """
        logging.info('Collecting tradefed testResult.xml and logs to %s.',
                     destination)
        repository_results = os.path.join(repository, 'results')
        repository_logs = os.path.join(repository, 'logs')
        # Because other tools rely on the currently chosen Google storage paths
        # we need to keep destination_results in
        # cheets_CTS.*/results/android-cts/2016.mm.dd_hh.mm.ss(/|.zip)
        # and destination_logs in
        # cheets_CTS.*/results/android-cts/logs/2016.mm.dd_hh.mm.ss/
        destination_results = destination
        destination_results_datetime = os.path.join(destination_results,
                                                    datetime)
        destination_results_datetime_zip = destination_results_datetime + '.zip'
        destination_logs = os.path.join(destination, 'logs')
        destination_logs_datetime = os.path.join(destination_logs, datetime)
        # We may have collected the same logs before, clean old versions.
        if os.path.exists(destination_results_datetime_zip):
            os.remove(destination_results_datetime_zip)
        if os.path.exists(destination_results_datetime):
            shutil.rmtree(destination_results_datetime)
        if os.path.exists(destination_logs_datetime):
            shutil.rmtree(destination_logs_datetime)
        shutil.copytree(
            os.path.join(repository_results, datetime),
            destination_results_datetime)
        # Copying the zip file has to happen after the tree so the destination
        # directory is available.
        shutil.copy(
            os.path.join(repository_results, datetime) + '.zip',
            destination_results_datetime_zip)
        shutil.copytree(
            os.path.join(repository_logs, datetime),
            destination_logs_datetime)

    def _get_expected_failures(self, directory):
        """Returns a set of expected failures.

        @param directory: A directory, relative to self.bindir, containing the
                          expected failure files.
        @return: a set of expected failures.
        """
        logging.info('Loading expected failures from %s.', directory)
        expected_fail_dir = os.path.join(self.bindir, directory)
        expected_fail_files = glob.glob(expected_fail_dir + '/*.' + self._abi)
        expected_failures = set()
        for expected_fail_file in expected_fail_files:
            try:
                file_path = os.path.join(expected_fail_dir, expected_fail_file)
                with open(file_path) as f:
                    lines = set(f.read().splitlines())
                    logging.info('Loaded %d expected failures from %s',
                                 len(lines), expected_fail_file)
                    expected_failures |= lines
            except IOError as e:
                logging.error('Error loading %s (%s).', file_path, e.strerror)
        logging.info('Finished loading expected failures: %s',
                     expected_failures)
        return expected_failures