"""distutils.command.sdist

Implements the Distutils 'sdist' command (create a source distribution)."""

# created 1999/09/22, Greg Ward

__revision__ = "$Id$"

import sys, os, string, re
import fnmatch
from types import *
from glob import glob
from distutils.core import Command
from distutils.util import newer, remove_tree
from distutils.text_file import TextFile
from distutils.errors import DistutilsExecError, DistutilsPlatformError


class sdist (Command):

    description = "create a source distribution (tarball, zip file, etc.)"

    user_options = [
        ('template=', 't',
         "name of manifest template file [default: MANIFEST.in]"),
        ('manifest=', 'm',
         "name of manifest file [default: MANIFEST]"),
        ('use-defaults', None,
         "include the default file set in the manifest "
         "[default; disable with --no-defaults]"),
        ('manifest-only', None,
         "just regenerate the manifest and then stop"),
        ('force-manifest', None,
         "forcibly regenerate the manifest and carry on as usual"),

        ('formats=', None,
         "formats for source distribution (tar, ztar, gztar, or zip)"),
        ('list-only', 'l',
         "just list files that would be distributed"),
        ('keep-tree', 'k',
         "keep the distribution tree around after creating " +
         "archive file(s)"),
        ]
    negative_opts = {'use-defaults': 'no-defaults'}

    default_format = { 'posix': 'gztar',
                       'nt': 'zip' }

    exclude_re = re.compile (r'\s*!\s*(\S+)')   # for manifest lines


    def initialize_options (self):
        # 'template' and 'manifest' are, respectively, the names of
        # the manifest template and manifest file.
        self.template = None
        self.manifest = None

        # 'use_defaults': if true, we will include the default file set
        # in the manifest
        self.use_defaults = 1

        self.manifest_only = 0
        self.force_manifest = 0

        self.formats = None
        self.list_only = 0
        self.keep_tree = 0


    def finalize_options (self):
        if self.manifest is None:
            self.manifest = "MANIFEST"
        if self.template is None:
            self.template = "MANIFEST.in"

        if self.formats is None:
            try:
                self.formats = [self.default_format[os.name]]
            except KeyError:
                raise DistutilsPlatformError, \
                      "don't know how to build source distributions on " + \
                      "%s platform" % os.name
        elif type (self.formats) is StringType:
            self.formats = string.split (self.formats, ',')


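    # A rough sketch of how the 'formats' option ends up after
    # finalize_options() (illustrative values only, not executed here):
    #
    #   python setup.py sdist --formats=gztar,zip
    #       => self.formats == ['gztar', 'zip']     (comma-split above)
    #   python setup.py sdist                       (no --formats given)
    #       => ['gztar'] on posix, ['zip'] on nt    (default_format table)

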
    def run (self):

        # 'files' is the list of files that will make up the manifest
        self.files = []

        # Ensure that all required meta-data is given; warn if not (but
        # don't die, it's not *that* serious!)
        self.check_metadata ()

        # Do whatever it takes to get the list of files to process
        # (process the manifest template, read an existing manifest,
        # whatever).  File list is put into 'self.files'.
        self.get_file_list ()

        # If user just wanted us to regenerate the manifest, stop now.
        if self.manifest_only:
            return

        # Otherwise, go ahead and create the source distribution tarball,
        # or zipfile, or whatever.
        self.make_distribution ()


    def check_metadata (self):

        dist = self.distribution

        missing = []
        for attr in ('name', 'version', 'url'):
            if not (hasattr (dist, attr) and getattr (dist, attr)):
                missing.append (attr)

        if missing:
            self.warn ("missing required meta-data: " +
                       string.join (missing, ", "))

        if dist.author:
            if not dist.author_email:
                self.warn ("missing meta-data: if 'author' supplied, " +
                           "'author_email' must be supplied too")
        elif dist.maintainer:
            if not dist.maintainer_email:
                self.warn ("missing meta-data: if 'maintainer' supplied, " +
                           "'maintainer_email' must be supplied too")
        else:
            self.warn ("missing meta-data: either (author and author_email) " +
                       "or (maintainer and maintainer_email) " +
                       "must be supplied")

    # check_metadata ()

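    # For reference, a setup script that passes check_metadata() without
    # warnings would supply something like the following (names and URL are
    # purely illustrative):
    #
    #   setup (name = "mypkg",
    #          version = "1.0",
    #          url = "http://www.example.com/mypkg/",
    #          author = "A. N. Author",
    #          author_email = "author@example.com",
    #          ...)
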

    def get_file_list (self):
        """Figure out the list of files to include in the source
           distribution, and put it in 'self.files'.  This might
           involve reading the manifest template (and writing the
           manifest), or just reading the manifest, or just using
           the default file set -- it all depends on the user's
           options and the state of the filesystem."""


        template_exists = os.path.isfile (self.template)
        if template_exists:
            template_newer = newer (self.template, self.manifest)

        # Regenerate the manifest if necessary (or if explicitly told to)
        if ((template_exists and template_newer) or
            self.force_manifest or
            self.manifest_only):

            if not template_exists:
                self.warn (("manifest template '%s' does not exist " +
                            "(using default file list)") %
                           self.template)

            # Add default file set to 'files'
            if self.use_defaults:
                self.find_defaults ()

            # Read manifest template if it exists
            if template_exists:
                self.read_template ()

            # File list now complete -- sort it so that higher-level files
            # come first
            sortable_files = map (os.path.split, self.files)
            sortable_files.sort ()
            self.files = []
            for sort_tuple in sortable_files:
                self.files.append (apply (os.path.join, sort_tuple))

            # Remove duplicates from the file list
            for i in range (len(self.files)-1, 0, -1):
                if self.files[i] == self.files[i-1]:
                    del self.files[i]

            # And write complete file list (including default file set) to
            # the manifest.
            self.write_manifest ()

        # Don't regenerate the manifest, just read it in.
        else:
            self.read_manifest ()

    # get_file_list ()


    def find_defaults (self):

        standards = [('README', 'README.txt'), 'setup.py']
        for fn in standards:
            if type (fn) is TupleType:
                alts = fn
                got_it = 0
                for fn in alts:
                    if os.path.exists (fn):
                        got_it = 1
                        self.files.append (fn)
                        break

                if not got_it:
                    self.warn ("standard file not found: should have one of " +
                               string.join (alts, ', '))
            else:
                if os.path.exists (fn):
                    self.files.append (fn)
                else:
                    self.warn ("standard file '%s' not found" % fn)

        optional = ['test/test*.py']
        for pattern in optional:
            files = filter (os.path.isfile, glob (pattern))
            if files:
                self.files.extend (files)

        if self.distribution.packages or self.distribution.py_modules:
            build_py = self.find_peer ('build_py')
            build_py.ensure_ready ()
            self.files.extend (build_py.get_source_files ())

        if self.distribution.ext_modules:
            build_ext = self.find_peer ('build_ext')
            build_ext.ensure_ready ()
            self.files.extend (build_ext.get_source_files ())


    def search_dir (self, dir, pattern=None):
        """Recursively find files under 'dir' matching 'pattern' (a string
           containing a Unix-style glob pattern).  If 'pattern' is None,
           find all files under 'dir'.  Return the list of found
           filenames."""

        allfiles = findall (dir)
        if pattern is None:
            return allfiles

        pattern_re = translate_pattern (pattern)
        files = []
        for file in allfiles:
            if pattern_re.match (os.path.basename (file)):
                files.append (file)

        return files

    # search_dir ()

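    # For example (hypothetical calls, shown as a sketch only):
    #
    #   self.search_dir ("examples")        # every file under examples/
    #   self.search_dir ("lib", "*.py")     # only files whose basename
    #                                       # matches *.py
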

    def exclude_pattern (self, pattern):
        """Remove filenames from 'self.files' that match 'pattern'."""
        print "exclude_pattern: pattern=%s" % pattern
        pattern_re = translate_pattern (pattern)
        for i in range (len (self.files)-1, -1, -1):
            if pattern_re.match (self.files[i]):
                print "removing %s" % self.files[i]
                del self.files[i]


    def recursive_exclude_pattern (self, dir, pattern=None):
        """Remove filenames from 'self.files' that are under 'dir'
           and whose basenames match 'pattern'."""

        print "recursive_exclude_pattern: dir=%s, pattern=%s" % (dir, pattern)
        if pattern is None:
            pattern_re = None
        else:
            pattern_re = translate_pattern (pattern)

        for i in range (len (self.files)-1, -1, -1):
            (cur_dir, cur_base) = os.path.split (self.files[i])
            if (cur_dir == dir and
                (pattern_re is None or pattern_re.match (cur_base))):
                print "removing %s" % self.files[i]
                del self.files[i]


    def read_template (self):
        """Read and parse the manifest template file named by
           'self.template' (usually "MANIFEST.in").  Process all file
           specifications (include and exclude) in the manifest template
           and add the resulting filenames to 'self.files'."""

        assert self.files is not None and type (self.files) is ListType

        template = TextFile (self.template,
                             strip_comments=1,
                             skip_blanks=1,
                             join_lines=1,
                             lstrip_ws=1,
                             rstrip_ws=1,
                             collapse_ws=1)

        all_files = findall ()

        while 1:

            line = template.readline()
            if line is None:            # end of file
                break

            words = string.split (line)
            action = words[0]

            # First, check that the right number of words are present
            # for the given action (which is the first word)
            if action in ('include','exclude',
                          'global-include','global-exclude'):
                if len (words) != 2:
                    template.warn \
                        ("invalid manifest template line: " +
                         "'%s' expects a single <pattern>" %
                         action)
                    continue

                pattern = words[1]

            elif action in ('recursive-include','recursive-exclude'):
                if len (words) != 3:
                    template.warn \
                        ("invalid manifest template line: " +
                         "'%s' expects <dir> <pattern>" %
                         action)
                    continue

                (dir, pattern) = words[1:3]

            elif action in ('graft','prune'):
                if len (words) != 2:
                    template.warn \
                        ("invalid manifest template line: " +
                         "'%s' expects a single <dir_pattern>" %
                         action)
                    continue

                dir_pattern = words[1]

            else:
                template.warn ("invalid manifest template line: " +
                               "unknown action '%s'" % action)
                continue

            # OK, now we know that the action is valid and we have the
            # right number of words on the line for that action -- so we
            # can proceed with minimal error-checking.  Also, we have
            # defined either 'pattern', or 'dir' and 'pattern', or
            # 'dir_pattern' -- so we don't have to spend any time digging
            # stuff up out of 'words'.

            if action == 'include':
                print "include", pattern
                files = select_pattern (all_files, pattern, anchor=1)
                if not files:
                    template.warn ("no files found matching '%s'" % pattern)
                else:
                    self.files.extend (files)

            elif action == 'exclude':
                print "exclude", pattern
                num = exclude_pattern (self.files, pattern, anchor=1)
                if num == 0:
                    template.warn \
                        ("no previously-included files found matching '%s'" %
                         pattern)

            elif action == 'global-include':
                print "global-include", pattern
                files = select_pattern (all_files, pattern, anchor=0)
                if not files:
                    template.warn (("no files found matching '%s' " +
                                    "anywhere in distribution") %
                                   pattern)
                else:
                    self.files.extend (files)

            elif action == 'global-exclude':
                print "global-exclude", pattern
                num = exclude_pattern (self.files, pattern, anchor=0)
                if num == 0:
                    template.warn \
                        (("no previously-included files matching '%s' " +
                          "found anywhere in distribution") %
                         pattern)

            elif action == 'recursive-include':
                print "recursive-include", dir, pattern
                files = select_pattern (all_files, pattern, prefix=dir)
                if not files:
                    template.warn (("no files found matching '%s' " +
                                    "under directory '%s'") %
                                   (pattern, dir))
                else:
                    self.files.extend (files)

            elif action == 'recursive-exclude':
                print "recursive-exclude", dir, pattern
                num = exclude_pattern (self.files, pattern, prefix=dir)
                if num == 0:
                    template.warn \
                        (("no previously-included files matching '%s' " +
                          "found under directory '%s'") %
                         (pattern, dir))

            elif action == 'graft':
                print "graft", dir_pattern
                files = select_pattern (all_files, None, prefix=dir_pattern)
                if not files:
                    template.warn ("no directories found matching '%s'" %
                                   dir_pattern)
                else:
                    self.files.extend (files)

            elif action == 'prune':
                print "prune", dir_pattern
                num = exclude_pattern (self.files, None, prefix=dir_pattern)
                if num == 0:
                    template.warn \
                        (("no previously-included directories found " +
                          "matching '%s'") %
                         dir_pattern)
            else:
                raise RuntimeError, \
                      "this cannot happen: invalid action '%s'" % action

        # while loop over lines of template file

    # read_template ()

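    # For reference, a small MANIFEST.in using the actions parsed above
    # might look like this (file and directory names are illustrative only):
    #
    #   include ChangeLog
    #   recursive-include examples *.py
    #   global-exclude *~
    #   graft doc
    #   prune doc/old
    #
    # Each line is one action: 'include', 'exclude', 'global-include' and
    # 'global-exclude' take a single <pattern>; 'recursive-include' and
    # 'recursive-exclude' take <dir> <pattern>; 'graft' and 'prune' take a
    # single <dir_pattern>.
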

    def write_manifest (self):
        """Write the file list in 'self.files' (presumably as filled in
           by 'find_defaults()' and 'read_template()') to the manifest file
           named by 'self.manifest'."""

        manifest = open (self.manifest, "w")
        for fn in self.files:
            manifest.write (fn + '\n')
        manifest.close ()

    # write_manifest ()


    def read_manifest (self):
        """Read the manifest file (named by 'self.manifest') and use
           it to fill in 'self.files', the list of files to include
           in the source distribution."""

        manifest = open (self.manifest)
        while 1:
            line = manifest.readline ()
            if line == '':              # end of file
                break
            if line[-1] == '\n':
                line = line[0:-1]
            self.files.append (line)

    # read_manifest ()



    def make_release_tree (self, base_dir, files):

        # First get the list of directories to create
        need_dir = {}
        for file in files:
            need_dir[os.path.join (base_dir, os.path.dirname (file))] = 1
        need_dirs = need_dir.keys()
        need_dirs.sort()

        # Now create them
        for dir in need_dirs:
            self.mkpath (dir)

        # And walk over the list of files, either making a hard link (if
        # os.link exists) to each one that doesn't already exist in its
        # corresponding location under 'base_dir', or copying each file
        # that's out-of-date in 'base_dir'.  (Usually, all files will be
        # out-of-date, because by default we blow away 'base_dir' when
        # we're done making the distribution archives.)

        try:
            link = os.link
            msg = "making hard links in %s..." % base_dir
        except AttributeError:
            link = 0
            msg = "copying files to %s..." % base_dir

        self.announce (msg)
        for file in files:
            dest = os.path.join (base_dir, file)
            if link:
                if not os.path.exists (dest):
                    self.execute (os.link, (file, dest),
                                  "linking %s -> %s" % (file, dest))
            else:
                self.copy_file (file, dest)

    # make_release_tree ()


    def make_tarball (self, base_dir, compress="gzip"):

        # XXX GNU tar 1.13 has a nifty option to add a prefix directory.
        # It's pretty new, though, so we certainly can't require it --
        # but it would be nice to take advantage of it to skip the
        # "create a tree of hardlinks" step!  (Would also be nice to
        # detect GNU tar to use its 'z' option and save a step.)

        if compress is not None and compress not in ('gzip', 'compress'):
            raise ValueError, \
                  "if given, 'compress' must be 'gzip' or 'compress'"

        archive_name = base_dir + ".tar"
        self.spawn (["tar", "-cf", archive_name, base_dir])

        if compress:
            self.spawn ([compress, archive_name])


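    # So for a distribution rooted at "foo-1.0" (name and version are just
    # examples), the 'gztar' format spawns roughly:
    #
    #   tar -cf foo-1.0.tar foo-1.0
    #   gzip foo-1.0.tar
    #
    # leaving foo-1.0.tar.gz; the plain 'tar' format stops after the first
    # command, and 'ztar' runs 'compress' instead of 'gzip'.
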
    def make_zipfile (self, base_dir):

        # This initially assumed the Unix 'zip' utility -- but
        # apparently InfoZIP's zip.exe works the same under Windows, so
        # no changes needed!

        try:
            self.spawn (["zip", "-r", base_dir + ".zip", base_dir])
        except DistutilsExecError:

            # XXX really should distinguish between "couldn't find
            # external 'zip' command" and "zip failed" -- shouldn't try
            # again in the latter case.  (I think fixing this will
            # require some cooperation from the spawn module -- perhaps
            # a utility function to search the path, so we can fall back
            # on zipfile.py without the failed spawn.)
            try:
                import zipfile
            except ImportError:
                raise DistutilsExecError, \
                      ("unable to create zip file '%s.zip': " +
                       "could neither find a standalone zip utility nor " +
                       "import the 'zipfile' module") % base_dir

            # mode must be plain "w" -- the zipfile module takes care of
            # opening the underlying file in binary mode itself
            z = zipfile.ZipFile (base_dir + ".zip", "w",
                                 compression=zipfile.ZIP_DEFLATED)

            def visit (z, dirname, names):
                for name in names:
                    path = os.path.join (dirname, name)
                    if os.path.isfile (path):
                        z.write (path, path)

            os.path.walk (base_dir, visit, z)
            z.close()


    def make_distribution (self):

        # Don't warn about missing meta-data here -- should be done
        # elsewhere.
        name = self.distribution.name or "UNKNOWN"
        version = self.distribution.version

        if version:
            base_dir = "%s-%s" % (name, version)
        else:
            base_dir = name

        # Remove any files that match "base_dir" from the fileset -- we
        # don't want to go distributing the distribution inside itself!
        self.exclude_pattern (base_dir + "*")

        self.make_release_tree (base_dir, self.files)
        for fmt in self.formats:
            if fmt == 'gztar':
                self.make_tarball (base_dir, compress='gzip')
            elif fmt == 'ztar':
                self.make_tarball (base_dir, compress='compress')
            elif fmt == 'tar':
                self.make_tarball (base_dir, compress=None)
            elif fmt == 'zip':
                self.make_zipfile (base_dir)

        if not self.keep_tree:
            remove_tree (base_dir, self.verbose, self.dry_run)

# class sdist


# ----------------------------------------------------------------------
# Utility functions

def findall (dir = os.curdir):
    """Find all files under 'dir' and return the list of filenames.
       The filenames include 'dir' as a prefix, except when 'dir' is the
       current directory, in which case they are bare relative names."""

    list = []
    stack = [dir]
    pop = stack.pop
    push = stack.append

    while stack:
        dir = pop()
        names = os.listdir (dir)

        for name in names:
            if dir != os.curdir:        # avoid the dreaded "./" syndrome
                fullname = os.path.join (dir, name)
            else:
                fullname = name
            list.append (fullname)
            if os.path.isdir (fullname) and not os.path.islink(fullname):
                push (fullname)

    return list

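# E.g. findall ("examples") might return something like
# ['examples/README', 'examples/basic/setup.py', ...] (purely illustrative
# names), while findall () walks the current directory and returns bare
# relative paths such as 'setup.py' or 'lib/foo.py'.
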

def select_pattern (files, pattern, anchor=1, prefix=None):
    """Select strings (presumably filenames) from 'files' that match
       'pattern', a Unix-style wildcard (glob) pattern.  Patterns are not
       quite the same as implemented by the 'fnmatch' module: '*' and '?'
       match non-special characters, where "special" is platform-dependent:
       slash on Unix; colon, slash, and backslash on DOS/Windows; and colon
       on Mac OS.

       If 'anchor' is true (the default), then the pattern match is more
       stringent: "*.py" will match "foo.py" but not "foo/bar.py".  If
       'anchor' is false, both of these will match.

       If 'prefix' is supplied, then only filenames starting with 'prefix'
       (itself a pattern) and ending with 'pattern', with anything in
       between them, will match.  'anchor' is ignored in this case.

       Return the list of matching strings, possibly empty."""

    matches = []
    pattern_re = translate_pattern (pattern, anchor, prefix)
    print "select_pattern: applying re %s" % pattern_re.pattern
    for name in files:
        if pattern_re.search (name):
            matches.append (name)
            print "  adding", name

    return matches

# select_pattern ()
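
# A hypothetical illustration of the anchoring rules (names are made up):
#
#   select_pattern (files, "*.py", anchor=1)       # matches "setup.py",
#                                                  # not "lib/foo.py"
#   select_pattern (files, "*.py", anchor=0)       # matches both
#   select_pattern (files, "*.py", prefix="lib")   # matches "lib/foo.py"
#                                                  # and anything deeper
#                                                  # under lib/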


def exclude_pattern (files, pattern, anchor=1, prefix=None):
    """Remove strings (presumably filenames) from 'files' that match
       'pattern'.  Return the number of strings removed -- the callers
       in 'read_template()' test this against zero to warn about
       patterns that excluded nothing."""
    pattern_re = translate_pattern (pattern, anchor, prefix)
    print "exclude_pattern: applying re %s" % pattern_re.pattern
    num_removed = 0
    for i in range (len(files)-1, -1, -1):
        if pattern_re.search (files[i]):
            print "  removing", files[i]
            del files[i]
            num_removed = num_removed + 1
    return num_removed

# exclude_pattern ()


def glob_to_re (pattern):
    """Translate a shell-like glob pattern to a regular expression;
       return a string containing the regex.  Differs from
       'fnmatch.translate()' in that '*' does not match "special
       characters" (which are platform-specific)."""
    pattern_re = fnmatch.translate (pattern)

    # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
    # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
    # and by extension they shouldn't match such "special characters" under
    # any OS.  So change all non-escaped dots in the RE to match any
    # character except the special characters.
    # XXX currently the "special characters" are just slash -- i.e. this is
    # Unix-only.
    pattern_re = re.sub (r'(^|[^\\])\.', r'\1[^/]', pattern_re)
    return pattern_re

# glob_to_re ()

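# With the fnmatch module of this vintage the translation comes out roughly
# as follows (exact spelling of the regexes may vary):
#
#   glob_to_re ("*.py")  ->  r"[^/]*\.py$"      # '*' stops at '/'
#   glob_to_re ("foo?")  ->  r"foo[^/]$"
#
# whereas fnmatch.translate ("*.py") would give r".*\.py$", letting '*'
# match across directory separators.
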

def translate_pattern (pattern, anchor=1, prefix=None):
    """Translate a shell-like wildcard pattern to a compiled regular
       expression.  Return the compiled regex."""

    if pattern:
        pattern_re = glob_to_re (pattern)
    else:
        pattern_re = ''

    if prefix is not None:
        prefix_re = (glob_to_re (prefix))[0:-1] # ditch trailing $
        pattern_re = "^" + os.path.join (prefix_re, ".*" + pattern_re)
    else:                               # no prefix -- respect anchor flag
        if anchor:
            pattern_re = "^" + pattern_re

    return re.compile (pattern_re)

# translate_pattern ()
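
# Putting the pieces together, on Unix the compiled patterns look roughly
# like this (illustrative only):
#
#   translate_pattern ("*.py", anchor=1)          ->  ^[^/]*\.py$
#   translate_pattern ("*.py", anchor=0)          ->   [^/]*\.py$
#   translate_pattern ("*.py", prefix="examples") ->  ^examples/.*[^/]*\.py$
#
# which is what select_pattern() and exclude_pattern() search against.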