blob: 1afe3bfeaeacaf4c3c362b36f1eba3c64eb4c467 [file] [log] [blame]
Ilja H. Friedelbee84a72016-09-28 15:57:06 -07001# Copyright 2016 The Chromium OS Authors. All rights reserved.
2# Use of this source code is governed by a BSD-style license that can be
3# found in the LICENSE file.
4
5# repohooks/pre-upload.py currently does not run pylint. But for developers who
6# want to check their code manually we disable several harmless pylint warnings
7# which just distract from more serious remaining issues.
8#
9# The instance variables _host and _install_paths are not defined in __init__().
10# pylint: disable=attribute-defined-outside-init
11#
12# Many short variable names don't follow the naming convention.
13# pylint: disable=invalid-name
14#
15# _parse_result() and _dir_size() don't access self and could be functions.
16# pylint: disable=no-self-use
17#
18# _ChromeLogin and _TradefedLogCollector have no public methods.
19# pylint: disable=too-few-public-methods
20
21import contextlib
22import errno
23import hashlib
24import logging
25import os
26import pipes
27import random
28import re
29import shutil
30import stat
31import tempfile
32import urlparse
33
34from autotest_lib.client.bin import utils as client_utils
35from autotest_lib.client.common_lib import error
36from autotest_lib.client.common_lib.cros import dev_server
37from autotest_lib.server import afe_utils
38from autotest_lib.server import autotest
39from autotest_lib.server import test
40from autotest_lib.server import utils
41from autotest_lib.site_utils import lxc
42
# 'lockfile' is a third-party package. In the lab, tests run inside lxc
# containers that may not have it preinstalled; install it on demand there.
# Outside a container a missing package is a real setup error.
try:
    import lockfile
except ImportError:
    if utils.is_in_container():
        # Ensure the container has the required packages installed.
        lxc.install_packages(python_packages=['lockfile'])
        import lockfile
    else:
        raise
52
53
# Pinned Google Storage bundles for the SDK tools and adb binaries.
_SDK_TOOLS_DIR = ('gs://chromeos-arc-images/builds/'
                  'git_mnc-dr-arc-dev-linux-static_sdk_tools/3264272')
_SDK_TOOLS_FILES = ['aapt']
# To stabilize adb behavior, we use dynamically linked adb.
_ADB_DIR = ('gs://chromeos-arc-images/builds/'
            'git_mnc-dr-arc-dev-linux-cheets_arm-user/3264272')
_ADB_FILES = ['adb']

# Polling cadence and deadline while waiting for adb to become usable.
_ADB_POLLING_INTERVAL_SECONDS = 1
_ADB_READY_TIMEOUT_SECONDS = 60
# File inside the Android container holding adb's authorized public keys.
_ANDROID_ADB_KEYS_PATH = '/data/misc/adb/adb_keys'

# Polling cadence and deadline while waiting for ARC to finish booting.
_ARC_POLLING_INTERVAL_SECONDS = 1
_ARC_READY_TIMEOUT_SECONDS = 60

# Prefix for the per-instance temporary install directory.
_TRADEFED_PREFIX = 'autotest-tradefed-install_'
# Cache locations: local path when run directly, shared path inside lxc.
_TRADEFED_CACHE_LOCAL = '/tmp/autotest-tradefed-cache'
_TRADEFED_CACHE_CONTAINER = '/usr/local/autotest/results/shared/cache'
_TRADEFED_CACHE_CONTAINER_LOCK = '/usr/local/autotest/results/shared/lock'

# According to dshi a drone has 500GB of disk space. It is ok for now to use
# 10GB of disk space, as no more than 10 tests should run in parallel.
# TODO(ihf): Investigate tighter cache size.
_TRADEFED_CACHE_MAX_SIZE = (10 * 1024 * 1024 * 1024)
78
79
80class _ChromeLogin(object):
81 """Context manager to handle Chrome login state."""
82
83 def __init__(self, host):
84 self._host = host
85
86 def __enter__(self):
87 """Logs in to the Chrome."""
88 logging.info('Ensure Android is running...')
89 autotest.Autotest(self._host).run_test('cheets_CTSHelper',
90 check_client_result=True)
91
92 def __exit__(self, exc_type, exc_value, traceback):
93 """On exit, to wipe out all the login state, reboot the machine.
94
95 @param exc_type: Exception type if an exception is raised from the
96 with-block.
97 @param exc_value: Exception instance if an exception is raised from
98 the with-block.
99 @param traceback: Stack trace info if an exception is raised from
100 the with-block.
101 @return None, indicating not to ignore an exception from the with-block
102 if raised.
103 """
104 logging.info('Rebooting...')
105 try:
106 self._host.reboot()
107 except Exception:
108 if exc_type is None:
109 raise
110 # If an exception is raise from the with-block, just record the
111 # exception for the rebooting to avoid ignoring the original
112 # exception.
113 logging.exception('Rebooting failed.')
114
115
@contextlib.contextmanager
def lock(filename):
    """Prevents other autotest/tradefed instances from accessing cache."""
    file_lock = lockfile.FileLock(filename)
    # It is tempting just to call file_lock.acquire(3600). But the
    # implementation has very poor temporal granularity (timeout/10), which
    # is unsuitable for our needs. See
    # /usr/lib64/python2.7/site-packages/lockfile/
    attempt_count = 0
    while not file_lock.i_am_locking():
        try:
            attempt_count += 1
            logging.info('Waiting for cache lock...')
            file_lock.acquire(random.randint(1, 5))
        except (lockfile.AlreadyLocked, lockfile.LockTimeout):
            if attempt_count > 1000:
                # Normally we should acquire the lock in a few seconds. Once
                # we wait on the order of hours either the dev server IO is
                # overloaded or a lock didn't get cleaned up. Take one for the
                # team, break the lock and report a failure. This should fix
                # the lock for following tests. If the failure affects more
                # than one job look for a deadlock or dev server overload.
                logging.error('Permanent lock failure. Trying to break lock.')
                file_lock.break_lock()
                raise error.TestFail('Error: permanent cache lock failure.')
        else:
            logging.info('Acquired cache lock after %d attempts.',
                         attempt_count)
    try:
        yield
    finally:
        file_lock.release()
        logging.info('Released cache lock.')
147
148
149class TradefedTest(test.test):
150 """Base class to prepare DUT to run tests via tradefed."""
151 version = 1
152
153 def initialize(self, host=None):
154 """Sets up the tools and binary bundles for the test."""
155 logging.info('Hostname: %s', host.hostname)
156 self._host = host
157 self._install_paths = []
158 # Tests in the lab run within individual lxc container instances.
159 if utils.is_in_container():
160 # Ensure the container has the required packages installed.
161 lxc.install_packages(packages=['unzip', 'default-jre'])
162 cache_root = _TRADEFED_CACHE_CONTAINER
163 else:
164 cache_root = _TRADEFED_CACHE_LOCAL
165 # The content of the cache survives across jobs.
166 self._safe_makedirs(cache_root)
167 self._tradefed_cache = os.path.join(cache_root, 'cache')
168 self._tradefed_cache_lock = os.path.join(cache_root, 'lock')
169 # The content of the install location does not survive across jobs and
170 # is isolated (by using a unique path)_against other autotest instances.
171 # This is not needed for the lab, but if somebody wants to run multiple
172 # TradedefTest instance.
173 self._tradefed_install = tempfile.mkdtemp(prefix=_TRADEFED_PREFIX)
174 # Under lxc the cache is shared between multiple autotest/tradefed
175 # instances. We need to synchronize access to it. All binaries are
176 # installed through the (shared) cache into the local (unshared)
177 # lxc/autotest instance storage.
178 # If clearing the cache it must happen before all downloads.
179 self._clear_download_cache_if_needed()
180 # Set permissions (rwxr-xr-x) to the executable binaries.
181 permission = (stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH
182 | stat.S_IXOTH)
183 self._install_files(_ADB_DIR, _ADB_FILES, permission)
184 self._install_files(_SDK_TOOLS_DIR, _SDK_TOOLS_FILES, permission)
185
186 def cleanup(self):
187 """Cleans up any dirtied state."""
188 # Kill any lingering adb servers.
189 self._run('adb', verbose=True, args=('kill-server',))
190 logging.info('Cleaning up %s.', self._tradefed_install)
191 shutil.rmtree(self._tradefed_install)
192
193 def _login_chrome(self):
194 """Returns Chrome log-in context manager.
195
196 Please see also cheets_CTSHelper for details about how this works.
197 """
198 return _ChromeLogin(self._host)
199
200 def _try_adb_connect(self):
201 """Attempts to connect to adb on the DUT.
202
203 @return boolean indicating if adb connected successfully.
204 """
205 # This may fail return failure due to a race condition in adb connect
206 # (b/29370989). If adb is already connected, this command will
207 # immediately return success.
208 hostport = '{}:{}'.format(self._host.hostname, self._host.port)
209 result = self._run(
210 'adb',
211 args=('connect', hostport),
212 verbose=True,
213 ignore_status=True)
214 logging.info('adb connect {}:\n{}'.format(hostport, result.stdout))
215 if result.exit_status != 0:
216 return False
217
218 result = self._run('adb', args=('devices',))
219 logging.info('adb devices:\n' + result.stdout)
220 if not re.search(
221 r'{}\s+(device|unauthorized)'.format(re.escape(hostport)),
222 result.stdout):
223 return False
224
225 # Actually test the connection with an adb command as there can be
226 # a race between detecting the connected device and actually being
227 # able to run a commmand with authenticated adb.
228 result = self._run('adb', args=('shell', 'exit'), ignore_status=True)
229 return result.exit_status == 0
230
    def _android_shell(self, command):
        """Run a command remotely on the device in an android shell

        This function is strictly for internal use only, as commands do not run
        in a fully consistent Android environment. Prefer adb shell instead.

        @param command: Shell command line; it is shell-quoted as a single
                        argument to android-sh on the host.
        """
        self._host.run('android-sh -c ' + pipes.quote(command))
238
239 def _write_android_file(self, filename, data):
240 """Writes a file to a location relative to the android container.
241
242 This is an internal function used to bootstrap adb.
243 Tests should use adb push to write files.
244 """
245 android_cmd = 'echo %s > %s' % (pipes.quote(data),
246 pipes.quote(filename))
247 self._android_shell(android_cmd)
248
    def _connect_adb(self):
        """Sets up ADB connection to the ARC container.

        Generates a fresh adb key pair, installs the public key inside the
        Android container, restarts adbd, and then polls until an
        authenticated adb connection to the DUT is usable.
        """
        logging.info('Setting up adb connection.')
        # Generate and push keys for adb.
        # TODO(elijahtaylor): Extract this code to arc_common and de-duplicate
        # code in arc.py on the client side tests.
        key_path = os.path.join(self.tmpdir, 'test_key')
        pubkey_path = key_path + '.pub'
        self._run('adb', verbose=True, args=('keygen', pipes.quote(key_path)))
        with open(pubkey_path, 'r') as f:
            self._write_android_file(_ANDROID_ADB_KEYS_PATH, f.read())
        # Restore the security context of the keys file after writing it.
        self._android_shell('restorecon ' + pipes.quote(_ANDROID_ADB_KEYS_PATH))
        os.environ['ADB_VENDOR_KEYS'] = key_path

        # Kill existing adb server to ensure that the env var is picked up.
        self._run('adb', verbose=True, args=('kill-server',))

        # This starts adbd.
        self._android_shell('setprop sys.usb.config mtp,adb')

        # adbd may take some time to come up. Repeatedly try to connect to adb.
        utils.poll_for_condition(lambda: self._try_adb_connect(),
                                 exception=error.TestFail(
                                     'Error: Failed to set up adb connection'),
                                 timeout=_ADB_READY_TIMEOUT_SECONDS,
                                 sleep_interval=_ADB_POLLING_INTERVAL_SECONDS)

        logging.info('Successfully setup adb connection.')
277
278 def _wait_for_arc_boot(self):
279 """Wait until ARC is fully booted.
280
281 Tests for the presence of the intent helper app to determine whether ARC
282 has finished booting.
283 """
284 def intent_helper_running():
285 result = self._run('adb', args=('shell', 'pgrep',
286 'org.chromium.arc.intent_helper'))
287 return bool(result.stdout)
288 utils.poll_for_condition(
289 intent_helper_running,
Ilja H. Friedel6d5ca8f2016-10-26 22:35:36 -0700290 exception=error.TestFail(
291 'Error: Timed out waiting for intent helper.'),
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700292 timeout=_ARC_READY_TIMEOUT_SECONDS,
293 sleep_interval=_ARC_POLLING_INTERVAL_SECONDS)
294
295 def _disable_adb_install_dialog(self):
296 """Disables a dialog shown on adb install execution.
297
298 By default, on adb install execution, "Allow Google to regularly check
299 device activity ... " dialog is shown. It requires manual user action
300 so that tests are blocked at the point.
301 This method disables it.
302 """
303 logging.info('Disabling the adb install dialog.')
304 result = self._run(
305 'adb',
306 verbose=True,
307 args=(
308 'shell',
309 'settings',
310 'put',
311 'global',
312 'verifier_verify_adb_installs',
313 '0'))
314 logging.info('Disable adb dialog: %s', result.stdout)
315
    def _ready_arc(self):
        """Ready ARC and adb for running tests via tradefed.

        Connects adb, disables the install verification dialog and then
        blocks until ARC has fully booted. The order matters: adb must be
        usable before the settings call, and tests need a booted ARC.
        """
        self._connect_adb()
        self._disable_adb_install_dialog()
        self._wait_for_arc_boot()
321
322 def _safe_makedirs(self, path):
323 """Creates a directory at |path| and its ancestors.
324
325 Unlike os.makedirs(), ignore errors even if directories exist.
326 """
327 try:
328 os.makedirs(path)
329 except OSError as e:
330 if not (e.errno == errno.EEXIST and os.path.isdir(path)):
331 raise
332
333 def _unzip(self, filename):
334 """Unzip the file.
335
336 The destination directory name will be the stem of filename.
337 E.g., _unzip('foo/bar/baz.zip') will create directory at
338 'foo/bar/baz', and then will inflate zip's content under the directory.
339 If here is already a directory at the stem, that directory will be used.
340
341 @param filename: Path to the zip archive.
342 @return Path to the inflated directory.
343 """
344 destination = os.path.splitext(filename)[0]
345 if os.path.isdir(destination):
346 return destination
347 self._safe_makedirs(destination)
348 utils.run('unzip', args=('-d', destination, filename))
349 return destination
350
351 def _dir_size(self, directory):
352 """Compute recursive size in bytes of directory."""
353 size = 0
354 for root, _, files in os.walk(directory):
355 size += sum(os.path.getsize(os.path.join(root, name))
356 for name in files)
357 return size
358
359 def _clear_download_cache_if_needed(self):
360 """Invalidates cache to prevent it from growing too large."""
361 # If the cache is large enough to hold a working set, we can simply
362 # delete everything without thrashing.
363 # TODO(ihf): Investigate strategies like LRU.
364 with lock(self._tradefed_cache_lock):
365 size = self._dir_size(self._tradefed_cache)
366 if size > _TRADEFED_CACHE_MAX_SIZE:
367 logging.info('Current cache size=%d got too large. Clearing %s.'
368 , size, self._tradefed_cache)
369 shutil.rmtree(self._tradefed_cache)
370 self._safe_makedirs(self._tradefed_cache)
371 else:
372 logging.info('Current cache size=%d of %s.', size,
373 self._tradefed_cache)
374
    def _download_to_cache(self, uri):
        """Downloads the uri from the storage server.

        It always checks the cache for available binaries first and skips
        download if binaries are already in cache.

        The caller of this function is responsible for holding the cache lock.

        Three download paths exist: wget for http(s) URIs, gsutil for gs://
        URIs outside the lab, and a dev server staging hop for gs:// URIs
        on moblab (which has no direct storage access).

        @param uri: The Google Storage or dl.google.com uri.
        @return Path to the downloaded object, name.
        """
        # Split uri into 3 pieces for use by gsutil and also by wget.
        parsed = urlparse.urlparse(uri)
        filename = os.path.basename(parsed.path)
        # We are hashing the uri instead of the binary. This is acceptable, as
        # the uris are supposed to contain version information and an object is
        # not supposed to be changed once created.
        output_dir = os.path.join(self._tradefed_cache,
                                  hashlib.md5(uri).hexdigest())
        output = os.path.join(output_dir, filename)
        # Check for existence of file.
        if os.path.exists(output):
            logging.info('Skipping download of %s, reusing %s.', uri, output)
            return output
        self._safe_makedirs(output_dir)

        if parsed.scheme not in ['gs', 'http', 'https']:
            raise error.TestFail('Error: Unknown download scheme %s' %
                                 parsed.scheme)
        if parsed.scheme in ['http', 'https']:
            logging.info('Using wget to download %s to %s.', uri, output_dir)
            # We are downloading 1 file at a time, hence using -O over -P.
            # We also limit the rate to 20MBytes/s
            utils.run(
                'wget',
                args=(
                    '--report-speed=bits',
                    '--limit-rate=20M',
                    '-O',
                    output,
                    uri),
                verbose=True)
            return output

        if not client_utils.is_moblab():
            # If the machine can access to the storage server directly,
            # defer to "gsutil" for downloading.
            logging.info('Host %s not in lab. Downloading %s directly to %s.',
                         self._host.hostname, uri, output)
            # b/17445576: gsutil rsync of individual files is not implemented.
            utils.run('gsutil', args=('cp', uri, output), verbose=True)
            return output

        # We are in the moblab. Because the machine cannot access the storage
        # server directly, use dev server to proxy.
        logging.info('Host %s is in lab. Downloading %s by staging to %s.',
                     self._host.hostname, uri, output)

        dirname = os.path.dirname(parsed.path)
        archive_url = '%s://%s%s' % (parsed.scheme, parsed.netloc, dirname)

        # First, request the devserver to download files into the lab network.
        # TODO(ihf): Switch stage_artifacts to honor rsync. Then we don't have
        # to shuffle files inside of tarballs.
        build = afe_utils.get_build(self._host)
        ds = dev_server.ImageServer.resolve(build)
        ds.stage_artifacts(build, files=[filename], archive_url=archive_url)

        # Then download files from the dev server.
        # TODO(ihf): use rsync instead of wget. Are there 3 machines involved?
        # Itself, dev_server plus DUT? Or is there just no rsync in moblab?
        ds_src = '/'.join([ds.url(), 'static', dirname, filename])
        logging.info('dev_server URL: %s', ds_src)
        # Calls into DUT to pull uri from dev_server.
        utils.run(
            'wget',
            args=(
                '--report-speed=bits',
                '--limit-rate=20M',
                '-O',
                output,
                ds_src),
            verbose=True)
        return output
459
460 def _instance_copy(self, cache_path):
461 """Makes a copy of a file from the (shared) cache to a wholy owned
462 local instance. Also copies one level of cache directoy (MD5 named).
463 """
464 filename = os.path.basename(cache_path)
465 dirname = os.path.basename(os.path.dirname(cache_path))
466 instance_dir = os.path.join(self._tradefed_install, dirname)
467 # Make sure destination directory is named the same.
468 self._safe_makedirs(instance_dir)
469 instance_path = os.path.join(instance_dir, filename)
470 shutil.copyfile(cache_path, instance_path)
471 return instance_path
472
473 def _install_bundle(self, gs_uri):
474 """Downloads a zip file, installs it and returns the local path."""
475 if not gs_uri.endswith('.zip'):
Ilja H. Friedel6d5ca8f2016-10-26 22:35:36 -0700476 raise error.TestFail('Error: Not a .zip file %s.', gs_uri)
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700477 # Atomic write through of file.
478 with lock(self._tradefed_cache_lock):
479 cache_path = self._download_to_cache(gs_uri)
480 local = self._instance_copy(cache_path)
481 return self._unzip(local)
482
483 def _install_files(self, gs_dir, files, permission):
484 """Installs binary tools."""
485 for filename in files:
486 gs_uri = os.path.join(gs_dir, filename)
487 # Atomic write through of file.
488 with lock(self._tradefed_cache_lock):
489 cache_path = self._download_to_cache(gs_uri)
490 local = self._instance_copy(cache_path)
491 os.chmod(local, permission)
492 # Keep track of PATH.
493 self._install_paths.append(os.path.dirname(local))
494
495 def _run(self, *args, **kwargs):
496 """Executes the given command line.
497
498 To support SDK tools, such as adb or aapt, this adds _install_paths
499 to the extra_paths. Before invoking this, ensure _install_files() has
500 been called.
501 """
502 kwargs['extra_paths'] = (
503 kwargs.get('extra_paths', []) + self._install_paths)
504 return utils.run(*args, **kwargs)
505
506 def _parse_tradefed_datetime(self, result, summary=None):
507 """Get the tradefed provided result ID consisting of a datetime stamp.
508
509 Unfortunately we are unable to tell tradefed where to store the results.
510 In the lab we have multiple instances of tradefed running in parallel
511 writing results and logs to the same base directory. This function
512 finds the identifier which tradefed used during the current run and
513 returns it for further processing of result files.
514
515 @param result: The result object from utils.run.
516 @param summary: Test result summary from runs so far.
517 @return datetime_id: The result ID chosen by tradefed.
518 Example: '2016.07.14_00.34.50'.
519 """
520 # This string is show for both 'run' and 'continue' after all tests.
521 match = re.search(r': XML test result file generated at (\S+). Passed',
522 result.stdout)
523 if not (match and match.group(1)):
524 # TODO(ihf): Find out if we ever recover something interesting in
525 # this case. Otherwise delete it.
526 # Try harder to find the remains. This string shows before all
527 # tests but only with 'run', not 'continue'.
528 logging.warning('XML test result file incomplete?')
529 match = re.search(r': Created result dir (\S+)', result.stdout)
530 if not (match and match.group(1)):
531 error_msg = 'Test did not complete due to Chrome or ARC crash.'
532 if summary:
533 error_msg += (' Test summary from previous runs: %s'
534 % summary)
Ilja H. Friedel6d5ca8f2016-10-26 22:35:36 -0700535 raise error.TestFail(error_msg)
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700536 datetime_id = match.group(1)
537 logging.info('Tradefed identified results and logs with %s.',
538 datetime_id)
539 return datetime_id
540
Rohit Makasana99116d32016-10-17 19:32:04 -0700541 def _parse_result(self, result, waivers=None):
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700542 """Check the result from the tradefed output.
543
544 This extracts the test pass/fail/executed list from the output of
545 tradefed. It is up to the caller to handle inconsistencies.
546
547 @param result: The result object from utils.run.
Rohit Makasana99116d32016-10-17 19:32:04 -0700548 @param waivers: a set() of tests which are permitted to fail.
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700549 """
550 # Parse the stdout to extract test status. In particular step over
551 # similar output for each ABI and just look at the final summary.
552 match = re.search(r'(XML test result file generated at (\S+). '
553 r'Passed (\d+), Failed (\d+), Not Executed (\d+))',
554 result.stdout)
555 if not match:
556 raise error.Test('Test log does not contain a summary.')
557
558 passed = int(match.group(3))
559 failed = int(match.group(4))
560 not_executed = int(match.group(5))
561 match = re.search(r'(Start test run of (\d+) packages, containing '
562 r'(\d+(?:,\d+)?) tests)', result.stdout)
563 if match and match.group(3):
564 tests = int(match.group(3).replace(',', ''))
565 else:
566 # Unfortunately this happens. Assume it made no other mistakes.
567 logging.warning('Tradefed forgot to print number of tests.')
568 tests = passed + failed + not_executed
Rohit Makasana99116d32016-10-17 19:32:04 -0700569 # TODO(rohitbm): make failure parsing more robust by extracting the list
570 # of failing tests instead of searching in the result blob. As well as
571 # only parse for waivers for the running ABI.
572 if waivers:
573 for testname in waivers:
574 if testname + ' FAIL' in result.stdout:
575 failed -= 1
576 # To maintain total count consistency.
577 passed += 1
578 logging.info('Waived failure %s', testname)
579
580 logging.info('tests=%d, passed=%d, failed=%d, not_executed=%d',
581 tests, passed, failed, not_executed)
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700582 return (tests, passed, failed, not_executed)
583
584 def _collect_logs(self, repository, datetime, destination):
585 """Collects the tradefed logs.
586
587 It is legal to collect the same logs multiple times. This is normal
588 after 'tradefed continue' updates existing logs with new results.
589
590 @param repository: Full path to tradefeds output on disk.
591 @param datetime: The identifier which tradefed assigned to the run.
592 Currently this looks like '2016.07.14_00.34.50'.
593 @param destination: Autotest result directory (destination of logs).
594 """
595 logging.info('Collecting tradefed testResult.xml and logs to %s.',
596 destination)
597 repository_results = os.path.join(repository, 'results')
598 repository_logs = os.path.join(repository, 'logs')
599 # Because other tools rely on the currently chosen Google storage paths
600 # we need to keep destination_results in
601 # cheets_CTS.*/results/android-cts/2016.mm.dd_hh.mm.ss(/|.zip)
602 # and destination_logs in
603 # cheets_CTS.*/results/android-cts/logs/2016.mm.dd_hh.mm.ss/
604 destination_results = destination
Ilja H. Friedelb83646b2016-10-18 13:02:59 -0700605 destination_results_datetime = os.path.join(destination_results,
606 datetime)
Ilja H. Friedelbee84a72016-09-28 15:57:06 -0700607 destination_results_datetime_zip = destination_results_datetime + '.zip'
608 destination_logs = os.path.join(destination, 'logs')
609 destination_logs_datetime = os.path.join(destination_logs, datetime)
610 # We may have collected the same logs before, clean old versions.
611 if os.path.exists(destination_results_datetime_zip):
612 os.remove(destination_results_datetime_zip)
613 if os.path.exists(destination_results_datetime):
614 shutil.rmtree(destination_results_datetime)
615 if os.path.exists(destination_logs_datetime):
616 shutil.rmtree(destination_logs_datetime)
617 shutil.copytree(
618 os.path.join(repository_results, datetime),
619 destination_results_datetime)
620 # Copying the zip file has to happen after the tree so the destination
621 # directory is available.
622 shutil.copy(
623 os.path.join(repository_results, datetime) + '.zip',
624 destination_results_datetime_zip)
625 shutil.copytree(
626 os.path.join(repository_logs, datetime),
627 destination_logs_datetime)