# Copyright 2016 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# repohooks/pre-upload.py currently does not run pylint. But for developers who
# want to check their code manually we disable several harmless pylint warnings
# which just distract from more serious remaining issues.
#
# The instance variables _host and _install_paths are not defined in __init__().
# pylint: disable=attribute-defined-outside-init
#
# Many short variable names don't follow the naming convention.
# pylint: disable=invalid-name
#
# _parse_result() and _dir_size() don't access self and could be functions.
# pylint: disable=no-self-use
#
# _ChromeLogin and _TradefedLogCollector have no public methods.
# pylint: disable=too-few-public-methods

import contextlib
import errno
import hashlib
import logging
import os
import pipes
import random
import re
import shutil
import stat
import tempfile
import urlparse

from autotest_lib.client.bin import utils as client_utils
from autotest_lib.client.common_lib import error
from autotest_lib.client.common_lib.cros import dev_server
from autotest_lib.server import afe_utils
from autotest_lib.server import autotest
from autotest_lib.server import test
from autotest_lib.server import utils
from autotest_lib.site_utils import lxc

try:
    import lockfile
except ImportError:
    if utils.is_in_container():
        # Ensure the container has the required packages installed.
        lxc.install_packages(python_packages=['lockfile'])
        import lockfile
    else:
        raise


_SDK_TOOLS_DIR = ('gs://chromeos-arc-images/builds/'
                  'git_mnc-dr-arc-dev-linux-static_sdk_tools/3264272')
_SDK_TOOLS_FILES = ['aapt']
# To stabilize adb behavior, we use dynamically linked adb.
_ADB_DIR = ('gs://chromeos-arc-images/builds/'
            'git_mnc-dr-arc-dev-linux-cheets_arm-user/3264272')
_ADB_FILES = ['adb']

_ADB_POLLING_INTERVAL_SECONDS = 1
_ADB_READY_TIMEOUT_SECONDS = 60
_ANDROID_ADB_KEYS_PATH = '/data/misc/adb/adb_keys'

_ARC_POLLING_INTERVAL_SECONDS = 1
_ARC_READY_TIMEOUT_SECONDS = 60

_TRADEFED_PREFIX = 'autotest-tradefed-install_'
_TRADEFED_CACHE_LOCAL = '/tmp/autotest-tradefed-cache'
_TRADEFED_CACHE_CONTAINER = '/usr/local/autotest/results/shared/cache'
_TRADEFED_CACHE_CONTAINER_LOCK = '/usr/local/autotest/results/shared/lock'

# According to dshi a drone has 500GB of disk space. It is ok for now to use
# 10GB of disk space, as no more than 10 tests should run in parallel.
# TODO(ihf): Investigate tighter cache size.
_TRADEFED_CACHE_MAX_SIZE = (10 * 1024 * 1024 * 1024)


class _ChromeLogin(object):
    """Context manager to handle Chrome login state."""

    def __init__(self, host):
        self._host = host

    def __enter__(self):
        """Logs into Chrome."""
        logging.info('Ensure Android is running...')
        autotest.Autotest(self._host).run_test('cheets_CTSHelper',
                                               check_client_result=True)

    def __exit__(self, exc_type, exc_value, traceback):
        """On exit, reboot the machine to wipe out all login state.

        @param exc_type: Exception type if an exception is raised from the
                         with-block.
        @param exc_value: Exception instance if an exception is raised from
                          the with-block.
        @param traceback: Stack trace info if an exception is raised from
                          the with-block.
        @return None, indicating not to ignore an exception from the with-block
                if raised.
        """
        logging.info('Rebooting...')
        try:
            self._host.reboot()
        except Exception:
            if exc_type is None:
                raise
            # If an exception is raised from the with-block, just record the
            # exception for the rebooting to avoid ignoring the original
            # exception.
            logging.exception('Rebooting failed.')


@contextlib.contextmanager
def lock(filename):
    """Prevents other autotest/tradefed instances from accessing cache."""
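    # Illustrative use only (mirrors how TradefedTest serializes access to the
    # shared cache further below; the lock file path is whatever the caller
    # passes in):
    #   with lock(self._tradefed_cache_lock):
    #       cache_path = self._download_to_cache(uri)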
    filelock = lockfile.FileLock(filename)
    # It is tempting just to call filelock.acquire(3600). But the implementation
    # has very poor temporal granularity (timeout/10), which is unsuitable for
    # our needs. See /usr/lib64/python2.7/site-packages/lockfile/
    while not filelock.i_am_locking():
        try:
            logging.info('Waiting for cache lock...')
            filelock.acquire(random.randint(1, 5))
        except (lockfile.AlreadyLocked, lockfile.LockTimeout):
            pass
        else:
            logging.info('Acquired cache lock.')
    try:
        yield
    finally:
        filelock.release()
        logging.info('Released cache lock.')


class TradefedTest(test.test):
    """Base class to prepare DUT to run tests via tradefed."""
    version = 1

    def initialize(self, host=None):
        """Sets up the tools and binary bundles for the test."""
        logging.info('Hostname: %s', host.hostname)
        self._host = host
        self._install_paths = []
        # Tests in the lab run within individual lxc container instances.
        if utils.is_in_container():
            # Ensure the container has the required packages installed.
            lxc.install_packages(packages=['unzip', 'default-jre'])
            cache_root = _TRADEFED_CACHE_CONTAINER
        else:
            cache_root = _TRADEFED_CACHE_LOCAL
        # The content of the cache survives across jobs.
        self._safe_makedirs(cache_root)
        self._tradefed_cache = os.path.join(cache_root, 'cache')
        self._tradefed_cache_lock = os.path.join(cache_root, 'lock')
        # The content of the install location does not survive across jobs and
        # is isolated (by using a unique path) against other autotest
        # instances. This is not needed for the lab, but helps if somebody
        # wants to run multiple TradefedTest instances at once.
        self._tradefed_install = tempfile.mkdtemp(prefix=_TRADEFED_PREFIX)
        # Under lxc the cache is shared between multiple autotest/tradefed
        # instances. We need to synchronize access to it. All binaries are
        # installed through the (shared) cache into the local (unshared)
        # lxc/autotest instance storage.
        # If the cache is to be cleared, it must happen before any downloads.
        self._clear_download_cache_if_needed()
        # Set permissions (rwxr-xr-x) to the executable binaries.
        permission = (stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH
                      | stat.S_IXOTH)
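        # The resulting mode is 0755; it is applied below to the downloaded adb
        # and aapt binaries so they can be executed from the install path.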
        self._install_files(_ADB_DIR, _ADB_FILES, permission)
        self._install_files(_SDK_TOOLS_DIR, _SDK_TOOLS_FILES, permission)

    def cleanup(self):
        """Cleans up any dirtied state."""
        # Kill any lingering adb servers.
        self._run('adb', verbose=True, args=('kill-server',))
        logging.info('Cleaning up %s.', self._tradefed_install)
        shutil.rmtree(self._tradefed_install)

    def _login_chrome(self):
        """Returns the Chrome log-in context manager.

        Please see also cheets_CTSHelper for details about how this works.
        """
        return _ChromeLogin(self._host)

    def _try_adb_connect(self):
        """Attempts to connect to adb on the DUT.

        @return boolean indicating if adb connected successfully.
        """
        # This may return failure due to a race condition in adb connect
        # (b/29370989). If adb is already connected, this command will
        # immediately return success.
        hostport = '{}:{}'.format(self._host.hostname, self._host.port)
        result = self._run(
            'adb',
            args=('connect', hostport),
            verbose=True,
            ignore_status=True)
        logging.info('adb connect {}:\n{}'.format(hostport, result.stdout))
        if result.exit_status != 0:
            return False

        result = self._run('adb', args=('devices',))
        logging.info('adb devices:\n' + result.stdout)
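        # A successfully attached DUT is expected to show up in the output as,
        # for example (illustrative): '<hostname>:<port>    device', or as
        # 'unauthorized' while the adb key exchange has not completed yet.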
        if not re.search(
                r'{}\s+(device|unauthorized)'.format(re.escape(hostport)),
                result.stdout):
            return False

        # Actually test the connection with an adb command as there can be
        # a race between detecting the connected device and actually being
        # able to run a command with authenticated adb.
        result = self._run('adb', args=('shell', 'exit'), ignore_status=True)
        return result.exit_status == 0

    def _android_shell(self, command):
        """Run a command remotely on the device in an android shell.

        This function is strictly for internal use only, as commands do not run
        in a fully consistent Android environment. Prefer adb shell instead.
        """
        self._host.run('android-sh -c ' + pipes.quote(command))

    def _write_android_file(self, filename, data):
        """Writes a file to a location relative to the android container.

        This is an internal function used to bootstrap adb.
        Tests should use adb push to write files.
        """
        android_cmd = 'echo %s > %s' % (pipes.quote(data),
                                        pipes.quote(filename))
        self._android_shell(android_cmd)

    def _connect_adb(self):
        """Sets up ADB connection to the ARC container."""
        logging.info('Setting up adb connection.')
        # Generate and push keys for adb.
        # TODO(elijahtaylor): Extract this code to arc_common and de-duplicate
        # the code in arc.py on the client side tests.
        key_path = os.path.join(self.tmpdir, 'test_key')
        pubkey_path = key_path + '.pub'
        self._run('adb', verbose=True, args=('keygen', pipes.quote(key_path)))
        with open(pubkey_path, 'r') as f:
            self._write_android_file(_ANDROID_ADB_KEYS_PATH, f.read())
        self._android_shell('restorecon ' + pipes.quote(_ANDROID_ADB_KEYS_PATH))
        os.environ['ADB_VENDOR_KEYS'] = key_path
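        # ADB_VENDOR_KEYS points the local adb client at the private key whose
        # public half was written to adb_keys above, so subsequent adb commands
        # can authenticate against the container without manual approval.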

        # Kill existing adb server to ensure that the env var is picked up.
        self._run('adb', verbose=True, args=('kill-server',))

        # This starts adbd.
        self._android_shell('setprop sys.usb.config mtp,adb')

        # adbd may take some time to come up. Repeatedly try to connect to adb.
        utils.poll_for_condition(lambda: self._try_adb_connect(),
                                 exception=error.TestFail(
                                     'Error: Failed to set up adb connection'),
                                 timeout=_ADB_READY_TIMEOUT_SECONDS,
                                 sleep_interval=_ADB_POLLING_INTERVAL_SECONDS)

        logging.info('Successfully set up adb connection.')

    def _wait_for_arc_boot(self):
        """Wait until ARC is fully booted.

        Tests for the presence of the intent helper app to determine whether
        ARC has finished booting.
        """
        def intent_helper_running():
            result = self._run('adb', args=('shell', 'pgrep',
                                            'org.chromium.arc.intent_helper'))
            return bool(result.stdout)
        utils.poll_for_condition(
            intent_helper_running,
            exception=error.TestFail(
                'Error: Timed out waiting for intent helper.'),
            timeout=_ARC_READY_TIMEOUT_SECONDS,
            sleep_interval=_ARC_POLLING_INTERVAL_SECONDS)

    def _disable_adb_install_dialog(self):
        """Disables a dialog shown on adb install execution.

        By default, when running adb install, the "Allow Google to regularly
        check device activity ... " dialog is shown. It requires manual user
        action, so tests would be blocked at that point. This method disables
        the dialog.
        """
        logging.info('Disabling the adb install dialog.')
        result = self._run(
            'adb',
            verbose=True,
            args=(
                'shell',
                'settings',
                'put',
                'global',
                'verifier_verify_adb_installs',
                '0'))
        logging.info('Disable adb dialog: %s', result.stdout)

    def _ready_arc(self):
        """Ready ARC and adb for running tests via tradefed."""
        self._connect_adb()
        self._disable_adb_install_dialog()
        self._wait_for_arc_boot()

    def _safe_makedirs(self, path):
        """Creates a directory at |path| and its ancestors.

        Unlike os.makedirs(), ignore errors even if directories exist.
        """
        try:
            os.makedirs(path)
        except OSError as e:
            if not (e.errno == errno.EEXIST and os.path.isdir(path)):
                raise

    def _unzip(self, filename):
        """Unzip the file.

        The destination directory name will be the stem of filename.
        E.g., _unzip('foo/bar/baz.zip') will create a directory at
        'foo/bar/baz', and then will inflate the zip's content under the
        directory. If there is already a directory at the stem, that directory
        will be used.

        @param filename: Path to the zip archive.
        @return Path to the inflated directory.
        """
        destination = os.path.splitext(filename)[0]
        if os.path.isdir(destination):
            return destination
        self._safe_makedirs(destination)
        utils.run('unzip', args=('-d', destination, filename))
        return destination

    def _dir_size(self, directory):
        """Compute recursive size in bytes of directory."""
        size = 0
        for root, _, files in os.walk(directory):
            size += sum(os.path.getsize(os.path.join(root, name))
                        for name in files)
        return size

    def _clear_download_cache_if_needed(self):
        """Invalidates cache to prevent it from growing too large."""
        # If the cache is large enough to hold a working set, we can simply
        # delete everything without thrashing.
        # TODO(ihf): Investigate strategies like LRU.
        with lock(self._tradefed_cache_lock):
            size = self._dir_size(self._tradefed_cache)
            if size > _TRADEFED_CACHE_MAX_SIZE:
                logging.info('Current cache size=%d got too large. Clearing %s.',
                             size, self._tradefed_cache)
                shutil.rmtree(self._tradefed_cache)
                self._safe_makedirs(self._tradefed_cache)
            else:
                logging.info('Current cache size=%d of %s.', size,
                             self._tradefed_cache)

    def _download_to_cache(self, uri):
        """Downloads the uri from the storage server.

        It always checks the cache for available binaries first and skips
        download if binaries are already in cache.

        The caller of this function is responsible for holding the cache lock.

        @param uri: The Google Storage or dl.google.com uri.
        @return Path to the downloaded object, name.
        """
        # Split uri into 3 pieces for use by gsutil and also by wget.
        parsed = urlparse.urlparse(uri)
        filename = os.path.basename(parsed.path)
        # We are hashing the uri instead of the binary. This is acceptable, as
        # the uris are supposed to contain version information and an object is
        # not supposed to be changed once created.
        output_dir = os.path.join(self._tradefed_cache,
                                  hashlib.md5(uri).hexdigest())
        output = os.path.join(output_dir, filename)
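        # Resulting cache layout (illustrative):
        #   <self._tradefed_cache>/<md5 of uri>/<basename of uri>
        # so different uris never collide and re-requesting the same uri below
        # is a cache hit.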
        # Check for existence of file.
        if os.path.exists(output):
            logging.info('Skipping download of %s, reusing %s.', uri, output)
            return output
        self._safe_makedirs(output_dir)

        if parsed.scheme not in ['gs', 'http', 'https']:
            raise error.TestFail('Error: Unknown download scheme %s' %
                                 parsed.scheme)
        if parsed.scheme in ['http', 'https']:
            logging.info('Using wget to download %s to %s.', uri, output_dir)
            # We are downloading 1 file at a time, hence using -O over -P.
            # We also limit the rate to 20MBytes/s.
            utils.run(
                'wget',
                args=(
                    '--report-speed=bits',
                    '--limit-rate=20M',
                    '-O',
                    output,
                    uri),
                verbose=True)
            return output

        if not client_utils.is_moblab():
            # If the machine can access the storage server directly,
            # defer to "gsutil" for downloading.
            logging.info('Host %s not in lab. Downloading %s directly to %s.',
                         self._host.hostname, uri, output)
            # b/17445576: gsutil rsync of individual files is not implemented.
            utils.run('gsutil', args=('cp', uri, output), verbose=True)
            return output

        # We are in the moblab. Because the machine cannot access the storage
        # server directly, use dev server to proxy.
        logging.info('Host %s is in lab. Downloading %s by staging to %s.',
                     self._host.hostname, uri, output)

        dirname = os.path.dirname(parsed.path)
        archive_url = '%s://%s%s' % (parsed.scheme, parsed.netloc, dirname)

        # First, request the devserver to download files into the lab network.
        # TODO(ihf): Switch stage_artifacts to honor rsync. Then we don't have
        # to shuffle files inside of tarballs.
        build = afe_utils.get_build(self._host)
        ds = dev_server.ImageServer.resolve(build)
        ds.stage_artifacts(build, files=[filename], archive_url=archive_url)

        # Then download files from the dev server.
        # TODO(ihf): use rsync instead of wget. Are there 3 machines involved?
        # Itself, dev_server plus DUT? Or is there just no rsync in moblab?
        ds_src = '/'.join([ds.url(), 'static', dirname, filename])
        logging.info('dev_server URL: %s', ds_src)
        # Calls into DUT to pull uri from dev_server.
        utils.run(
            'wget',
            args=(
                '--report-speed=bits',
                '--limit-rate=20M',
                '-O',
                output,
                ds_src),
            verbose=True)
        return output

    def _instance_copy(self, cache_path):
        """Makes a copy of a file from the (shared) cache to a wholly owned
        local instance. Also copies one level of cache directory (MD5 named).
        """
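        # E.g. (illustrative): .../cache/<md5>/android-cts.zip is copied to
        # <self._tradefed_install>/<md5>/android-cts.zip, preserving the
        # hash-named parent directory.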
        filename = os.path.basename(cache_path)
        dirname = os.path.basename(os.path.dirname(cache_path))
        instance_dir = os.path.join(self._tradefed_install, dirname)
        # Make sure destination directory is named the same.
        self._safe_makedirs(instance_dir)
        instance_path = os.path.join(instance_dir, filename)
        shutil.copyfile(cache_path, instance_path)
        return instance_path

    def _install_bundle(self, gs_uri):
        """Downloads a zip file, installs it and returns the local path."""
        if not gs_uri.endswith('.zip'):
            raise error.TestFail('Error: Not a .zip file %s.' % gs_uri)
        # Atomic write through of file.
        with lock(self._tradefed_cache_lock):
            cache_path = self._download_to_cache(gs_uri)
            local = self._instance_copy(cache_path)
        return self._unzip(local)

    def _install_files(self, gs_dir, files, permission):
        """Installs binary tools."""
        for filename in files:
            gs_uri = os.path.join(gs_dir, filename)
            # Atomic write through of file.
            with lock(self._tradefed_cache_lock):
                cache_path = self._download_to_cache(gs_uri)
                local = self._instance_copy(cache_path)
            os.chmod(local, permission)
            # Keep track of PATH.
            self._install_paths.append(os.path.dirname(local))

    def _run(self, *args, **kwargs):
        """Executes the given command line.

        To support SDK tools, such as adb or aapt, this adds _install_paths
        to the extra_paths. Before invoking this, ensure _install_files() has
        been called.
        """
        kwargs['extra_paths'] = (
            kwargs.get('extra_paths', []) + self._install_paths)
        return utils.run(*args, **kwargs)

    def _parse_tradefed_datetime(self, result, summary=None):
        """Get the tradefed provided result ID consisting of a datetime stamp.

        Unfortunately we are unable to tell tradefed where to store the
        results. In the lab we have multiple instances of tradefed running in
        parallel writing results and logs to the same base directory. This
        function finds the identifier which tradefed used during the current
        run and returns it for further processing of result files.

        @param result: The result object from utils.run.
        @param summary: Test result summary from runs so far.
        @return datetime_id: The result ID chosen by tradefed.
                             Example: '2016.07.14_00.34.50'.
        """
        # This string is shown for both 'run' and 'continue' after all tests.
        match = re.search(r': XML test result file generated at (\S+). Passed',
                          result.stdout)
        if not (match and match.group(1)):
            # TODO(ihf): Find out if we ever recover something interesting in
            # this case. Otherwise delete it.
            # Try harder to find the remains. This string shows before all
            # tests but only with 'run', not 'continue'.
            logging.warning('XML test result file incomplete?')
            match = re.search(r': Created result dir (\S+)', result.stdout)
            if not (match and match.group(1)):
                error_msg = 'Test did not complete due to Chrome or ARC crash.'
                if summary:
                    error_msg += (' Test summary from previous runs: %s'
                                  % summary)
                raise error.TestFail(error_msg)
        datetime_id = match.group(1)
        logging.info('Tradefed identified results and logs with %s.',
                     datetime_id)
        return datetime_id

    def _parse_result(self, result, waivers=None):
        """Check the result from the tradefed output.

        This extracts the test pass/fail/executed list from the output of
        tradefed. It is up to the caller to handle inconsistencies.

        @param result: The result object from utils.run.
        @param waivers: a set() of tests which are permitted to fail.
        """
        # Parse the stdout to extract test status. In particular step over
        # similar output for each ABI and just look at the final summary.
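        # The final summary line being matched looks like, e.g. (illustrative):
        #   XML test result file generated at <dir>. Passed 100, Failed 0,
        #   Not Executed 0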
        match = re.search(r'(XML test result file generated at (\S+). '
                          r'Passed (\d+), Failed (\d+), Not Executed (\d+))',
                          result.stdout)
        if not match:
            raise error.TestFail('Test log does not contain a summary.')

        passed = int(match.group(3))
        failed = int(match.group(4))
        not_executed = int(match.group(5))
        match = re.search(r'(Start test run of (\d+) packages, containing '
                          r'(\d+(?:,\d+)?) tests)', result.stdout)
        if match and match.group(3):
            tests = int(match.group(3).replace(',', ''))
        else:
            # Unfortunately this happens. Assume it made no other mistakes.
            logging.warning('Tradefed forgot to print number of tests.')
            tests = passed + failed + not_executed
        # TODO(rohitbm): make failure parsing more robust by extracting the
        # list of failing tests instead of searching in the result blob, and
        # only parse waivers for the running ABI.
        if waivers:
            for testname in waivers:
                if testname + ' FAIL' in result.stdout:
                    failed -= 1
                    # To maintain total count consistency.
                    passed += 1
                    logging.info('Waived failure %s', testname)

        logging.info('tests=%d, passed=%d, failed=%d, not_executed=%d',
                     tests, passed, failed, not_executed)
        return (tests, passed, failed, not_executed)

    def _collect_logs(self, repository, datetime, destination):
        """Collects the tradefed logs.

        It is legal to collect the same logs multiple times. This is normal
        after 'tradefed continue' updates existing logs with new results.

        @param repository: Full path to tradefed's output on disk.
        @param datetime: The identifier which tradefed assigned to the run.
                         Currently this looks like '2016.07.14_00.34.50'.
        @param destination: Autotest result directory (destination of logs).
        """
        logging.info('Collecting tradefed testResult.xml and logs to %s.',
                     destination)
        repository_results = os.path.join(repository, 'results')
        repository_logs = os.path.join(repository, 'logs')
        # Because other tools rely on the currently chosen Google storage paths
        # we need to keep destination_results in
        # cheets_CTS.*/results/android-cts/2016.mm.dd_hh.mm.ss(/|.zip)
        # and destination_logs in
        # cheets_CTS.*/results/android-cts/logs/2016.mm.dd_hh.mm.ss/
        destination_results = destination
        destination_results_datetime = os.path.join(destination_results,
                                                    datetime)
        destination_results_datetime_zip = destination_results_datetime + '.zip'
        destination_logs = os.path.join(destination, 'logs')
        destination_logs_datetime = os.path.join(destination_logs, datetime)
        # We may have collected the same logs before, clean old versions.
        if os.path.exists(destination_results_datetime_zip):
            os.remove(destination_results_datetime_zip)
        if os.path.exists(destination_results_datetime):
            shutil.rmtree(destination_results_datetime)
        if os.path.exists(destination_logs_datetime):
            shutil.rmtree(destination_logs_datetime)
        shutil.copytree(
            os.path.join(repository_results, datetime),
            destination_results_datetime)
        # Copying the zip file has to happen after the tree so the destination
        # directory is available.
        shutil.copy(
            os.path.join(repository_results, datetime) + '.zip',
            destination_results_datetime_zip)
        shutil.copytree(
            os.path.join(repository_logs, datetime),
            destination_logs_datetime)