"""
This module defines the BasePackageManager Class which provides an
implementation of the packaging system API providing methods to fetch,
upload and remove packages. Site specific extensions to any of these methods
should inherit this class.
"""

import re, os, sys, traceback, subprocess, shutil, time, urlparse
import fcntl, logging
from autotest_lib.client.common_lib import error, utils, global_config


class PackageUploadError(error.AutotestError):
    'Raised when there is an error uploading the package'

class PackageFetchError(error.AutotestError):
    'Raised when there is an error fetching the package'

class PackageRemoveError(error.AutotestError):
    'Raised when there is an error removing the package'

class PackageInstallError(error.AutotestError):
    'Raised when there is an error installing the package'

class RepoDiskFull(error.AutotestError):
    'Raised when the destination for packages is full'

class RepoWriteError(error.AutotestError):
    "Raised when packager cannot write to a repo's destination"

class RepoUnknownError(error.AutotestError):
    "Raised when a repo fails in an unexpected way"

class RepoError(error.AutotestError):
    "Raised when a repo isn't working in some way"

# the name of the checksum file that stores the packages' checksums
CHECKSUM_FILE = "packages.checksum"


def parse_ssh_path(repo):
    '''
    Parse ssh://xx@xx/path/to/ and return a tuple with host_line and
    remote path
    '''

    match = re.search('^ssh://(.*?)(/.*)$', repo)
    if match:
        return match.groups()
    else:
        raise PackageUploadError("Incorrect SSH path in global_config: %s"
                                 % repo)
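
# For illustration, parsing a hypothetical repo URL:
#   parse_ssh_path('ssh://user@pkg-host/usr/local/autotest/packages')
#   returns ('user@pkg-host', '/usr/local/autotest/packages')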


def repo_run_command(repo, cmd, ignore_status=False):
    """Run a command relative to the repo's path"""
    repo = repo.strip()
    run_cmd = None
    if repo.startswith('ssh://'):
        username = None
        hostline, remote_path = parse_ssh_path(repo)
        if '@' in hostline:
            username, host = hostline.split('@')
            run_cmd = 'ssh %s@%s "cd %s && %s"' % (username, host,
                                                   remote_path, cmd)
        else:
            run_cmd = 'ssh %s "cd %s && %s"' % (hostline, remote_path, cmd)

    else:
        run_cmd = "cd %s && %s" % (repo, cmd)

    if run_cmd:
        return utils.run(run_cmd, ignore_status=ignore_status)
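
# As a sketch of the commands repo_run_command() builds (hypothetical paths):
#   repo_run_command('ssh://user@pkg-host/packages', 'ls')
#       runs: ssh user@pkg-host "cd /packages && ls"
#   repo_run_command('/usr/local/autotest/packages', 'ls')
#       runs: cd /usr/local/autotest/packages && ls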


def check_diskspace(repo, min_free=None):
    if not min_free:
        min_free = global_config.global_config.get_config_value('PACKAGES',
                                                        'minimum_free_space',
                                                        type=int)
    try:
        df = repo_run_command(repo, 'df -mP . | tail -1').stdout.split()
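        # 'df -P' prints Filesystem, blocks, Used, Available, Capacity and
        # Mounted on; with -m the sizes are in megabytes, so df[3] below is
        # the available space in MB (divided by 1000.0 to approximate GB).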
        free_space_gb = int(df[3])/1000.0
    except Exception, e:
        raise RepoUnknownError('Unknown Repo Error: %s' % e)
    if free_space_gb < min_free:
        raise RepoDiskFull('Not enough disk space available')


def check_write(repo):
    try:
        repo_testfile = '.repo_test_file'
        repo_run_command(repo, 'touch %s' % repo_testfile).stdout.strip()
        repo_run_command(repo, 'rm ' + repo_testfile)
    except error.CmdError:
        raise RepoWriteError('Unable to write to ' + repo)


def trim_custom_directories(repo, older_than_days=None):
    if not older_than_days:
        older_than_days = global_config.global_config.get_config_value(
                'PACKAGES', 'custom_max_age', type=int)
    cmd = 'find . -type f -atime +%s -exec rm -f {} \;' % older_than_days
    repo_run_command(repo, cmd, ignore_status=True)


class BasePackageManager(object):
    _repo_exception = {}
    REPO_OK = object()
    _wget_cmd_pattern = 'wget --connect-timeout=15 -nv %s -O %s'

    def __init__(self, pkgmgr_dir, hostname=None, repo_urls=None,
                 upload_paths=None, do_locking=True, run_function=utils.run,
                 run_function_args=[], run_function_dargs={}):
        '''
        repo_urls: The list of repository urls which are consulted
                   while fetching the package
        upload_paths: The list of repositories to which the package
                      is uploaded
        pkgmgr_dir : A directory that can be used by the package manager
                     to dump stuff (like checksum files of the repositories
                     etc.).
        do_locking : Enable locking when the packages are installed.

        run_function is used to execute the commands throughout this file.
        It defaults to utils.run() but a custom method (if provided) should
        be of the same schema as utils.run. It should return a CmdResult
        object and throw a CmdError exception. The reason for using a separate
        function to run the commands is that the same code can be used to
        fetch a package on the local machine or on a remote machine (in which
        case ssh_host's run function is passed in for run_function).
        '''
        # In memory dictionary that stores the checksums of packages
        self._checksum_dict = {}

        self.pkgmgr_dir = pkgmgr_dir
        self.do_locking = do_locking
        self.hostname = hostname

        # Process the repository URLs and the upload paths if specified
        if not repo_urls:
            self.repo_urls = []
        else:
            if hostname:
                self.repo_urls = repo_urls
                self.repo_urls = list(self.get_mirror_list())
            else:
                self.repo_urls = list(repo_urls)
        if not upload_paths:
            self.upload_paths = []
        else:
            self.upload_paths = list(upload_paths)


        # Create an internal function that is a simple wrapper of
        # run_function and takes in the args and dargs as arguments
        def _run_command(command, _run_command_args=run_function_args,
                         _run_command_dargs={}):
            '''
            Special internal function that takes in a command as
            argument and passes it on to run_function (if specified).
            The _run_command_dargs are merged into run_function_dargs
            with the former having more precedence than the latter.
            '''
            new_dargs = dict(run_function_dargs)
            new_dargs.update(_run_command_dargs)
            # avoid polluting logs with extremely verbose packaging output
            new_dargs.update({'stdout_tee' : None})

            return run_function(command, *_run_command_args,
                                **new_dargs)

        self._run_command = _run_command
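
    # A hedged sketch of constructing a manager (the values are hypothetical):
    #   pkgmgr = BasePackageManager('/tmp/packages',
    #                               repo_urls=['http://pkg-host/packages'],
    #                               upload_paths=['ssh://user@pkg-host/packages'],
    #                               do_locking=False)
    # When fetching onto a remote machine, that host's run() method can be
    # passed in as run_function, as described in the docstring above.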

    def repo_check(self, repo):
        '''
        Check to make sure the repo is in a sane state:
        - ensure we have at least XX amount of free space
        - make sure we can write to the repo
        '''
        try:
            check_diskspace(repo)
            check_write(repo)
        except (RepoWriteError, RepoUnknownError, RepoDiskFull), e:
            raise RepoError("ERROR: Repo %s: %s" % (repo, e))


    def upkeep(self, custom_repos=None):
        '''
        Clean up custom upload/download areas
        '''
        from autotest_lib.server import subcommand
        if not custom_repos:
            custom_repos = global_config.global_config.get_config_value(
                    'PACKAGES', 'custom_upload_location').split(',')
            custom_download = global_config.global_config.get_config_value(
                    'PACKAGES', 'custom_download_location')
            custom_repos += [custom_download]

        results = subcommand.parallel_simple(trim_custom_directories,
                                             custom_repos, log=False)


    def install_pkg(self, name, pkg_type, fetch_dir, install_dir,
                    preserve_install_dir=False, repo_url=None):
        '''
        Remove install_dir if it already exists and then recreate it unless
        preserve_install_dir is specified as True.
        Fetch the package into fetch_dir. Untar the package into install_dir.
        The assumption is that packages are of the form :
        <pkg_type>-<pkg_name>.tar.bz2
        name : name of the package
        type : type of the package
        fetch_dir : The directory into which the package tarball will be
                    fetched.
        install_dir : the directory where the package files will be untarred to
        repo_url : the url of the repository to fetch the package from.
        '''

        # The do_locking flag is on by default unless you disable it
        # (typically in the cases where packages are installed directly from
        # the server onto the client, in which case the fcntl locking will
        # not work as the code runs on the server).
        if self.do_locking:
            lockfile_name = '.%s-%s-lock' % (name, pkg_type)
            lockfile = open(os.path.join(self.pkgmgr_dir, lockfile_name), 'w')

        try:
            if self.do_locking:
                fcntl.flock(lockfile, fcntl.LOCK_EX)

            self._run_command('mkdir -p %s' % fetch_dir)

            pkg_name = self.get_tarball_name(name, pkg_type)
            fetch_path = os.path.join(fetch_dir, pkg_name)
            try:
                # Fetch the package into fetch_dir
                self.fetch_pkg(pkg_name, fetch_path, use_checksum=True)

                # check to see if the install_dir exists and if it does
                # then check to see if the .checksum file is the latest
                install_dir_exists = False
                try:
                    self._run_command("ls %s" % install_dir)
                    install_dir_exists = True
                except (error.CmdError, error.AutoservRunError):
                    pass

                if (install_dir_exists and
                    not self.untar_required(fetch_path, install_dir)):
                    return

                # untar the package into install_dir and
                # update the checksum in that directory
                if not preserve_install_dir:
                    # Make sure we clean up the install_dir
                    self._run_command('rm -rf %s' % install_dir)
                self._run_command('mkdir -p %s' % install_dir)

                self.untar_pkg(fetch_path, install_dir)

            except PackageFetchError, why:
                raise PackageInstallError('Installation of %s(type:%s) failed'
                                          ' : %s' % (name, pkg_type, why))
        finally:
            if self.do_locking:
                fcntl.flock(lockfile, fcntl.LOCK_UN)
                lockfile.close()


    def fetch_pkg(self, pkg_name, dest_path, repo_url=None, use_checksum=False):
        '''
        Fetch the package into dest_path from repo_url. By default repo_url
        is None and the package is looked for in all the repositories
        specified. Otherwise it fetches it from the specific repo_url.
        pkg_name : name of the package (ex: test-sleeptest.tar.bz2,
                   dep-gcc.tar.bz2, kernel.1-1.rpm)
        repo_url : the URL of the repository where the package is located.
        dest_path : complete path of where the package will be fetched to.
        use_checksum : This is set to False to fetch the packages.checksum file
                       so that the checksum comparison is bypassed for the
                       checksum file itself. This is used internally by the
                       packaging system. It should be ignored by external
                       callers of this method who use it to fetch custom
                       packages.
        '''

        try:
            self._run_command("ls %s" % os.path.dirname(dest_path))
        except (error.CmdError, error.AutoservRunError):
            raise PackageFetchError("Please provide a valid "
                                    "destination: %s " % dest_path)

        # See if the package was already fetched earlier, if so
        # the checksums need to be compared and the package is now
        # fetched only if they differ.
        pkg_exists = False
        try:
            self._run_command("ls %s" % dest_path)
            pkg_exists = True
        except (error.CmdError, error.AutoservRunError):
            pass

        # if a repository location is explicitly provided, fetch the package
        # from there and return
        if repo_url:
            repo_url_list = [repo_url]
        elif len(self.repo_urls) > 0:
            repo_url_list = self.repo_urls
        else:
            raise PackageFetchError("There are no repository urls specified")

        error_msgs = {}
        for location in repo_url_list:
            try:
                # When not using checksums, simply fetch the file; this path
                # is used internally when fetching the checksum file itself.
                if not use_checksum:
                    self.fetch_pkg_file(pkg_name, dest_path, location)

                # Fetch the package if a) the pkg does not exist or
                # b) if the checksum differs for the existing package
                elif (not pkg_exists or
                      not self.compare_checksum(dest_path, location)):
                    self.fetch_pkg_file(pkg_name, dest_path, location)
                    # Update the checksum of the package in the packages'
                    # checksum file
                    self.update_checksum(dest_path)
                return
            except (PackageFetchError, error.AutoservRunError):
                # The package could not be found in this repo, continue looking
                logging.error('%s could not be fetched from %s', pkg_name,
                              location)

        # if we got here then that means the package is not found
        # in any of the repositories.
        raise PackageFetchError("%s could not be fetched from any of"
                                " the repos %s : %s " % (pkg_name,
                                                         repo_url_list,
                                                         error_msgs))
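
    # For example (illustrative paths), a caller might fetch a test package
    # into a temporary directory with:
    #   pkgmgr.fetch_pkg('test-sleeptest.tar.bz2',
    #                    '/tmp/packages/test-sleeptest.tar.bz2',
    #                    use_checksum=True)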


    def fetch_pkg_file(self, filename, dest_path, source_url):
        """
        Fetch the file from source_url into dest_path. The package repository
        url is parsed and the appropriate retrieval method is determined.

        """
        if source_url.startswith('http://'):
            self.fetch_file_http(filename, dest_path, source_url)
        else:
            raise PackageFetchError("Invalid location %s" % source_url)


    def fetch_file_http(self, filename, dest_path, source_url):
        """
        Fetch the package using http protocol. Raises a PackageFetchError.
        """
        logging.info("Fetching %s from %s to %s", filename, source_url,
                     dest_path)
        # check to see if the source_url is reachable or not
        self.run_http_test(source_url, os.path.dirname(dest_path))

        pkg_path = os.path.join(source_url, filename)
        try:
            self._run_command(self._wget_cmd_pattern % (pkg_path, dest_path))
            logging.debug("Successfully fetched %s from %s", filename,
                          source_url)
        except error.CmdError:
            raise PackageFetchError("%s not found in %s" % (filename,
                                                            source_url))


    def run_http_test(self, source_url, dest_dir):
        '''
        Run a simple 30 second wget on source_url just to see if it is
        reachable. This avoids having to wait for the full 10 minute timeout.
        '''
        dest_file_path = os.path.join(dest_dir, 'http_test_file')

        BPM = BasePackageManager
        error_msg = "HTTP test failed. Failed to contact"
        # We should never get here unless the source_url starts with http://
        assert(source_url.startswith('http://'))

        # Get the http server name from the URL
        server_name = urlparse.urlparse(source_url)[1]
        http_cmd = self._wget_cmd_pattern % (server_name, dest_file_path)

        # Following repo_exception optimization is disabled for now.
        # Checksum files are optional. The attempted download of a
        # missing checksum file erroneously causes the repos to be marked
        # dead, causing download of its custom kernels to fail.
        # It also stays dead until Autotest is restarted.
        if server_name in BPM._repo_exception and False:  # <--- TEMP
            if BPM._repo_exception[server_name] == BPM.REPO_OK:
                # This repository is fine. Simply return
                return
            else:
                raise PackageFetchError("%s - %s : %s "
                                        % (error_msg, server_name,
                                           BPM._repo_exception[server_name]))
        try:
            try:
                self._run_command(http_cmd,
                                  _run_command_dargs={'timeout':30})
                BPM._repo_exception[server_name] = BPM.REPO_OK
            finally:
                self._run_command('rm -f %s' % dest_file_path)
        except Exception, e:
            BPM._repo_exception[server_name] = e
            raise PackageFetchError("%s - %s: %s " % (error_msg, server_name,
                                                      e))


    def upload_pkg(self, pkg_path, upload_path=None, update_checksum=False):
        from autotest_lib.server import subcommand
        if upload_path:
            upload_path_list = [upload_path]
            self.upkeep(upload_path_list)
        elif len(self.upload_paths) > 0:
            self.upkeep()
            upload_path_list = self.upload_paths
        else:
            raise PackageUploadError("Invalid Upload Path specified")

        if update_checksum:
            # get the packages' checksum file and update it with the current
            # package's checksum
            self.update_checksum(pkg_path)

        commands = []
        for path in upload_path_list:
            commands.append(subcommand.subcommand(self.upload_pkg_parallel,
                                                  (pkg_path, path,
                                                   update_checksum)))

        results = subcommand.parallel(commands, 300, return_results=True)
        for result in results:
            if result:
                print str(result)


    # TODO(aganti): Fix the bug with the current checksum logic where
    # packages' checksums that are not present consistently in all the
    # repositories are not handled properly. This is a corner case though
    # but the ideal solution is to make the checksum file repository specific
    # and then maintain it.
    def upload_pkg_parallel(self, pkg_path, upload_path, update_checksum=False):
        '''
        Uploads to a specified upload_path or to all the repos.
        Also uploads the checksum file to all the repos.
        pkg_path : The complete path to the package file
        upload_path : the absolute path where the files are copied to.
                      if set to 'None' assumes 'all' repos
        update_checksum : If set to False (the default), the checksum file is
                          not updated. This is necessary for custom packages
                          (like custom kernels and custom tests) that get
                          uploaded but do not need to be part of the checksum
                          file, which they would otherwise bloat.
        '''
        self.repo_check(upload_path)
        # upload the package
        if os.path.isdir(pkg_path):
            self.upload_pkg_dir(pkg_path, upload_path)
        else:
            self.upload_pkg_file(pkg_path, upload_path)
        if update_checksum:
            self.upload_pkg_file(self._get_checksum_file_path(),
                                 upload_path)


    def upload_pkg_file(self, file_path, upload_path):
        '''
        Upload a single file. Depending on the upload path, the appropriate
        method for that protocol is called. Currently this simply copies the
        file to the target directory (but can be extended for other protocols)
        This assumes that the web server is running on the same machine where
        the method is being called from. The upload_path's files are
        basically served by that web server.
        '''
        try:
            if upload_path.startswith('ssh://'):
                # parse ssh://user@host/usr/local/autotest/packages
                hostline, remote_path = parse_ssh_path(upload_path)
                try:
                    utils.run('scp %s %s:%s' % (file_path, hostline,
                                                remote_path))
                    r_path = os.path.join(remote_path,
                                          os.path.basename(file_path))
                    utils.run("ssh %s 'chmod 644 %s'" % (hostline, r_path))
                except error.CmdError:
                    logging.error("Error uploading to repository %s",
                                  upload_path)
            else:
                shutil.copy(file_path, upload_path)
                os.chmod(os.path.join(upload_path,
                                      os.path.basename(file_path)), 0644)
        except (IOError, os.error), why:
            logging.error("Upload of %s to %s failed: %s", file_path,
                          upload_path, why)


    def upload_pkg_dir(self, dir_path, upload_path):
        '''
        Upload a full directory. Depending on the upload path, the appropriate
        method for that protocol is called. Currently this copies the whole
        tmp package directory to the target directory.
        This assumes that the web server is running on the same machine where
        the method is being called from. The upload_path's files are
        basically served by that web server.
        '''
        local_path = os.path.join(dir_path, "*")
        try:
            if upload_path.startswith('ssh://'):
                hostline, remote_path = parse_ssh_path(upload_path)
                try:
                    utils.run('scp %s %s:%s' % (local_path, hostline,
                                                remote_path))
                    ssh_path = os.path.join(remote_path, "*")
                    utils.run("ssh %s 'chmod 644 %s'" % (hostline, ssh_path))
                except error.CmdError:
                    logging.error("Error uploading to repository: %s",
                                  upload_path)
            else:
                utils.run("cp %s %s " % (local_path, upload_path))
                up_path = os.path.join(upload_path, "*")
                utils.run("chmod 644 %s" % up_path)
        except (IOError, os.error), why:
            raise PackageUploadError("Upload of %s to %s failed: %s"
                                     % (dir_path, upload_path, why))


    def remove_pkg(self, pkg_name, remove_path=None, remove_checksum=False):
        '''
        Remove the package from the specified remove_path
        pkg_name : name of the package (ex: test-sleeptest.tar.bz2,
                   dep-gcc.tar.bz2)
        remove_path : the location to remove the package from.

        '''
        if remove_path:
            remove_path_list = [remove_path]
        elif len(self.upload_paths) > 0:
            remove_path_list = self.upload_paths
        else:
            raise PackageRemoveError("Invalid path to remove the pkg from")

        checksum_path = self._get_checksum_file_path()

        if remove_checksum:
            self.remove_checksum(pkg_name)

        # remove the package and upload the checksum file to the repos
        for path in remove_path_list:
            self.remove_pkg_file(pkg_name, path)
            self.upload_pkg_file(checksum_path, path)


    def remove_pkg_file(self, filename, pkg_dir):
        '''
        Remove the file named filename from pkg_dir
        '''
        try:
            # Remove the file
            if pkg_dir.startswith('ssh://'):
                hostline, remote_path = parse_ssh_path(pkg_dir)
                path = os.path.join(remote_path, filename)
                utils.run("ssh %s 'rm -rf %s'" % (hostline, path))
            else:
                os.remove(os.path.join(pkg_dir, filename))
        except (IOError, os.error), why:
            raise PackageRemoveError("Could not remove %s from %s: %s "
                                     % (filename, pkg_dir, why))


    def get_mirror_list(self):
        '''
        Stub function for site specific mirrors.

        Returns:
            Priority ordered list
        '''
        return self.repo_urls


    def _get_checksum_file_path(self):
        '''
        Return the complete path of the checksum file (assumed to be stored
        in self.pkgmgr_dir).
        '''
        return os.path.join(self.pkgmgr_dir, CHECKSUM_FILE)


    def _get_checksum_dict(self):
        '''
        Fetch the checksum file if not already fetched. If the checksum file
        cannot be fetched from the repos then a new file is created with
        the current package's (specified in pkg_path) checksum value in it.
        Populate the local checksum dictionary with the values read from
        the checksum file.
        The checksum file is assumed to be present in self.pkgmgr_dir
        '''
        checksum_path = self._get_checksum_file_path()
        if not self._checksum_dict:
            # Fetch the checksum file
            try:
                try:
                    self._run_command("ls %s" % checksum_path)
                except (error.CmdError, error.AutoservRunError):
                    # The packages checksum file does not exist locally.
                    # See if it is present in the repositories.
                    self.fetch_pkg(CHECKSUM_FILE, checksum_path)
            except PackageFetchError, e:
                # This should not happen while fetching a package. If a
                # package is present in the repository, the corresponding
                # checksum file should also be automatically present. This
                # case happens only when a package is being uploaded and it
                # is the first package to be uploaded to the repos (hence no
                # checksum file created yet).
                # Return an empty dictionary in that case.
                return {}

            # Read the checksum file into memory
            checksum_file_contents = self._run_command('cat '
                                                       + checksum_path).stdout

            # Return {} if we have an empty checksum file present
            if not checksum_file_contents.strip():
                return {}

            # Parse the checksum file contents into self._checksum_dict
            for line in checksum_file_contents.splitlines():
                checksum, package_name = line.split(None, 1)
                self._checksum_dict[package_name] = checksum

        return self._checksum_dict


    def _save_checksum_dict(self, checksum_dict):
        '''
        Save the checksum dictionary onto the checksum file. Update the
        local _checksum_dict variable with this new set of values.
        checksum_dict : New checksum dictionary
        '''
        checksum_path = self._get_checksum_file_path()
        self._checksum_dict = checksum_dict.copy()
        checksum_contents = '\n'.join(checksum + ' ' + pkg_name
                                      for pkg_name, checksum in
                                      checksum_dict.iteritems())
        # Write the checksum file back to disk
        self._run_command('echo "%s" > %s' % (checksum_contents,
                                              checksum_path))
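
    # The checksum file written above is a plain text file with one line per
    # package, of the form "<md5sum> <package name>", e.g. (made-up values):
    #   00112233445566778899aabbccddeeff test-sleeptest.tar.bz2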


    def compute_checksum(self, pkg_path):
        '''
        Compute the MD5 checksum for the package file and return it.
        pkg_path : The complete path for the package file
        '''
        md5sum_output = self._run_command("md5sum %s " % pkg_path).stdout
        return md5sum_output.split()[0]


    def update_checksum(self, pkg_path):
        '''
        Update the checksum of the package in the packages' checksum
        file. This method is called whenever a package is fetched just
        to be sure that the checksums in the local file are the latest.
        pkg_path : The complete path to the package file.
        '''
        # Compute the new checksum
        new_checksum = self.compute_checksum(pkg_path)
        checksum_dict = self._get_checksum_dict()
        checksum_dict[os.path.basename(pkg_path)] = new_checksum
        self._save_checksum_dict(checksum_dict)


    def remove_checksum(self, pkg_name):
        '''
        Remove the checksum of the package from the packages' checksum file.
        This method is called whenever a package is removed from the
        repositories in order to clean up its corresponding checksum.
        pkg_name : The name of the package to be removed
        '''
        checksum_dict = self._get_checksum_dict()
        if pkg_name in checksum_dict:
            del checksum_dict[pkg_name]
        self._save_checksum_dict(checksum_dict)


    def compare_checksum(self, pkg_path, repo_url):
        '''
        Calculate the checksum of the file specified in pkg_path and
        compare it with the checksum in the checksum file
        Return True if both match else return False.
        pkg_path : The full path to the package file for which the
                   checksum is being compared
        repo_url : The URL to fetch the checksum from
        '''
        checksum_dict = self._get_checksum_dict()
        package_name = os.path.basename(pkg_path)
        if not checksum_dict or package_name not in checksum_dict:
            return False

        repository_checksum = checksum_dict[package_name]
        local_checksum = self.compute_checksum(pkg_path)
        return (local_checksum == repository_checksum)


    def tar_package(self, pkg_name, src_dir, dest_dir, exclude_string=None):
        '''
        Create a tar.bz2 file with the name 'pkg_name' say test-blah.tar.bz2.
        Excludes the directories specified in exclude_string while tarring
        the source. Returns the tarball path.
        '''
        tarball_path = os.path.join(dest_dir, pkg_name)
        temp_path = tarball_path + '.tmp'
        cmd = "tar -cvjf %s -C %s %s " % (temp_path, src_dir, exclude_string)

        try:
            utils.system(cmd)
        except:
            os.unlink(temp_path)
            raise

        os.rename(temp_path, tarball_path)
        return tarball_path


    def untar_required(self, tarball_path, dest_dir):
        '''
        Compare the checksum of the tarball_path with the .checksum file
        in the dest_dir and return False if it matches. The untar
        of the package happens only if the checksums do not match.
        '''
        checksum_path = os.path.join(dest_dir, '.checksum')
        try:
            existing_checksum = self._run_command('cat ' + checksum_path).stdout
        except (error.CmdError, error.AutoservRunError):
            # If the .checksum file is not present (generally, this should
            # not be the case) then return True so that the untar happens
            return True

        new_checksum = self.compute_checksum(tarball_path)
        return (new_checksum.strip() != existing_checksum.strip())


    def untar_pkg(self, tarball_path, dest_dir):
        '''
        Untar the package present in the tarball_path and put a
        ".checksum" file in the dest_dir containing the checksum
        of the tarball. This method assumes that the package to be
        untarred is of the form <name>.tar.bz2
        '''
        self._run_command('tar xjf %s -C %s' % (tarball_path, dest_dir))
        # Put the .checksum file in the install_dir to note
        # where the package came from
        pkg_checksum = self.compute_checksum(tarball_path)
        pkg_checksum_path = os.path.join(dest_dir, '.checksum')
        self._run_command('echo "%s" > %s '
                          % (pkg_checksum, pkg_checksum_path))


    def get_tarball_name(self, name, pkg_type):
        return "%s-%s.tar.bz2" % (pkg_type, name)
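
    # For example, get_tarball_name('sleeptest', 'test') returns
    # 'test-sleeptest.tar.bz2' (matching the tarballs referenced above).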


    def is_url(self, url):
        """Return true if url looks like a URL"""
        return url.startswith('http://')


    def get_package_name(self, url, pkg_type):
        '''
        Extract the group and test name for the url. This method is currently
        used only for tests.
        '''
        if pkg_type == 'test':
            regex = '[^:]+://(.*)/([^/]*)$'
            return self._get_package_name(url, regex)
        else:
            return ('', url)


    def _get_package_name(self, url, regex):
        if not self.is_url(url):
            if url.endswith('.tar.bz2'):
                testname = url.replace('.tar.bz2', '')
                testname = re.sub(r'(\d*)\.', '', testname)
                return (testname, testname)
            else:
                return ('', url)

        match = re.match(regex, url)
        if not match:
            return ('', url)
        group, filename = match.groups()
        # Generate the group prefix.
        group = re.sub(r'\W', '_', group)
        # Drop the extension to get the raw test name.
        testname = re.sub(r'\.tar\.bz2', '', filename)
        # Drop any random numbers at the end of the test name if any
        testname = re.sub(r'\.(\d*)', '', testname)
        return (group, testname)
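
    # As an illustration (hypothetical URL), for
    #   url = 'http://pkg-host/mygroup/test-sleeptest.tar.bz2'
    # the regex yields group='pkg-host/mygroup' and
    # filename='test-sleeptest.tar.bz2', so this method returns
    # ('pkg_host_mygroup', 'test-sleeptest').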


SitePackageManager = utils.import_site_class(
    __file__, "autotest_lib.client.common_lib.site_packages",
    "SitePackageManager", BasePackageManager)

class PackageManager(SitePackageManager):
    pass
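

# A minimal usage sketch (the paths and URL below are hypothetical):
#   pkgmgr = PackageManager('/usr/local/autotest/packages',
#                           repo_urls=['http://pkg-host/packages'])
#   pkgmgr.install_pkg('sleeptest', 'test', '/tmp/packages',
#                      '/usr/local/autotest/client/tests/sleeptest')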