"""Miscellaneous utility functions."""

import os
import re
import csv
import sys
import errno
import shutil
import string
import hashlib
import tarfile
import zipfile
import posixpath
import subprocess
import sysconfig
from glob import iglob as std_iglob
from fnmatch import fnmatchcase
from inspect import getsource
from configparser import RawConfigParser

from packaging import logger
from packaging.errors import (PackagingPlatformError, PackagingFileError,
                              PackagingByteCompileError, PackagingExecError,
                              InstallationException, PackagingInternalError)

_PLATFORM = None
_DEFAULT_INSTALLER = 'packaging'


def newer(source, target):
    """Tell if the target is newer than the source.

    Returns true if 'source' exists and is more recently modified than
    'target', or if 'source' exists and 'target' doesn't.

    Returns false if both exist and 'target' is the same age or younger
    than 'source'. Raise PackagingFileError if 'source' does not exist.

    Note that this test is not very accurate: files created in the same second
    will have the same "age".
    """
    if not os.path.exists(source):
        raise PackagingFileError("file '%s' does not exist" %
                                 os.path.abspath(source))
    if not os.path.exists(target):
        return True

    return os.stat(source).st_mtime > os.stat(target).st_mtime
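
# Illustrative usage ('setup.cfg' and 'build/setup.cfg' are made-up paths):
#   >>> newer('setup.cfg', 'build/setup.cfg')
#   True   # the source was modified more recently, or the target does not exist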


def get_platform():
    """Return a string that identifies the current platform.

    By default, will return the value returned by sysconfig.get_platform(),
    but it can be changed by calling set_platform().
    """
    global _PLATFORM
    if _PLATFORM is None:
        _PLATFORM = sysconfig.get_platform()
    return _PLATFORM


def set_platform(identifier):
    """Set the platform string identifier returned by get_platform().

    Note that this change doesn't impact the value returned by
    sysconfig.get_platform(); it is local to packaging.
    """
    global _PLATFORM
    _PLATFORM = identifier


def convert_path(pathname):
    """Return 'pathname' as a name that will work on the native filesystem.

    The path is split on '/' and put back together again using the current
    directory separator. Needed because filenames in the setup script are
    always supplied in Unix style, and have to be converted to the local
    convention before we can actually use them in the filesystem. Raises
    ValueError on non-Unix-ish systems if 'pathname' either starts or
    ends with a slash.
    """
    if os.sep == '/':
        return pathname
    if not pathname:
        return pathname
    if pathname[0] == '/':
        raise ValueError("path '%s' cannot be absolute" % pathname)
    if pathname[-1] == '/':
        raise ValueError("path '%s' cannot end with '/'" % pathname)

    paths = pathname.split('/')
    while os.curdir in paths:
        paths.remove(os.curdir)
    if not paths:
        return os.curdir
    return os.path.join(*paths)
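
# Illustrative usage on Windows, where os.sep is '\\' ('docs/logo.png' is a
# made-up path):
#   >>> convert_path('docs/logo.png')
#   'docs\\logo.png'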


def change_root(new_root, pathname):
    """Return 'pathname' with 'new_root' prepended.

    If 'pathname' is relative, this is equivalent to
    os.path.join(new_root, pathname). Otherwise, it requires making 'pathname'
    relative and then joining the two, which is tricky on DOS/Windows.
    """
    if os.name == 'posix':
        if not os.path.isabs(pathname):
            return os.path.join(new_root, pathname)
        else:
            return os.path.join(new_root, pathname[1:])

    elif os.name == 'nt':
        drive, path = os.path.splitdrive(pathname)
        if path[0] == '\\':
            path = path[1:]
        return os.path.join(new_root, path)

    elif os.name == 'os2':
        drive, path = os.path.splitdrive(pathname)
        if path[0] == os.sep:
            path = path[1:]
        return os.path.join(new_root, path)

    else:
        raise PackagingPlatformError("nothing known about "
                                     "platform '%s'" % os.name)

_environ_checked = False


def check_environ():
    """Ensure that 'os.environ' has all the environment variables needed.

    We guarantee that users can use these in config files, command-line
    options, etc. Currently this includes:
      HOME - user's home directory (Unix only)
      PLAT - description of the current platform, including hardware
             and OS (see 'get_platform()')
    """
    global _environ_checked
    if _environ_checked:
        return

    if os.name == 'posix' and 'HOME' not in os.environ:
        import pwd
        os.environ['HOME'] = pwd.getpwuid(os.getuid())[5]

    if 'PLAT' not in os.environ:
        os.environ['PLAT'] = sysconfig.get_platform()

    _environ_checked = True


def subst_vars(s, local_vars):
    """Perform shell/Perl-style variable substitution on 's'.

    Every occurrence of '$' followed by a name is considered a variable, and
    the variable is substituted by the value found in the 'local_vars'
    dictionary, or in 'os.environ' if it's not in 'local_vars'.
    'os.environ' is first checked/augmented to guarantee that it contains
    certain values: see 'check_environ()'. Raise ValueError for any
    variables not found in either 'local_vars' or 'os.environ'.
    """
    check_environ()

    def _subst(match, local_vars=local_vars):
        var_name = match.group(1)
        if var_name in local_vars:
            return str(local_vars[var_name])
        else:
            return os.environ[var_name]

    try:
        return re.sub(r'\$([a-zA-Z_][a-zA-Z_0-9]*)', _subst, s)
    except KeyError as var:
        raise ValueError("invalid variable '$%s'" % var)
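
# Illustrative usage ('name' and 'version' are made-up keys for the example):
#   >>> subst_vars('$name-$version.tar.gz', {'name': 'spam', 'version': '1.0'})
#   'spam-1.0.tar.gz'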


# Needed by 'split_quoted()'
_wordchars_re = _squote_re = _dquote_re = None


def _init_regex():
    global _wordchars_re, _squote_re, _dquote_re
    _wordchars_re = re.compile(r'[^\\\'\"%s ]*' % string.whitespace)
    _squote_re = re.compile(r"'(?:[^'\\]|\\.)*'")
    _dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"')


def split_quoted(s):
    """Split a string up according to Unix shell-like rules for quotes and
    backslashes.

    In short: words are delimited by spaces, as long as those
    spaces are not escaped by a backslash, or inside a quoted string.
    Single and double quotes are equivalent, and the quote characters can
    be backslash-escaped. The backslash is stripped from any two-character
    escape sequence, leaving only the escaped character. The quote
    characters are stripped from any quoted string. Returns a list of
    words.
    """
    # This is a nice algorithm for splitting up a single string, since it
    # doesn't require character-by-character examination. It was a little
    # bit of a brain-bender to get it working right, though...
    if _wordchars_re is None:
        _init_regex()

    s = s.strip()
    words = []
    pos = 0

    while s:
        m = _wordchars_re.match(s, pos)
        end = m.end()
        if end == len(s):
            words.append(s[:end])
            break

        if s[end] in string.whitespace:   # unescaped, unquoted whitespace: now
            words.append(s[:end])         # we definitely have a word delimiter
            s = s[end:].lstrip()
            pos = 0

        elif s[end] == '\\':              # preserve whatever is being escaped;
                                          # will become part of the current word
            s = s[:end] + s[end + 1:]
            pos = end + 1

        else:
            if s[end] == "'":             # slurp singly-quoted string
                m = _squote_re.match(s, end)
            elif s[end] == '"':           # slurp doubly-quoted string
                m = _dquote_re.match(s, end)
            else:
                raise RuntimeError("this can't happen "
                                   "(bad char '%c')" % s[end])

            if m is None:
                raise ValueError("bad string (mismatched %s quotes?)" % s[end])

            beg, end = m.span()
            s = s[:beg] + s[beg + 1:end - 1] + s[end:]
            pos = m.end() - 2

        if pos >= len(s):
            words.append(s)
            break

    return words
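
# Illustrative usage:
#   >>> split_quoted('this is "a test"')
#   ['this', 'is', 'a test']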


def split_multiline(value):
    """Split a multiline string into a list, excluding blank lines."""

    return [element for element in
            (line.strip() for line in value.split('\n'))
            if element]
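
# Illustrative usage:
#   >>> split_multiline('docs\n  examples\n\n  tests')
#   ['docs', 'examples', 'tests']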


def execute(func, args, msg=None, verbose=0, dry_run=False):
    """Perform some action that affects the outside world.

    Some actions (e.g. writing to the filesystem) are special because
    they are disabled by the 'dry_run' flag. This function takes care of
    all that bureaucracy for you; all you have to do is supply the
    function to call and an argument tuple for it (to embody the
    "external action" being performed), and an optional message to
    print.
    """
    if msg is None:
        msg = "%s%r" % (func.__name__, args)
        if msg[-2:] == ',)':  # correct for singleton tuple
            msg = msg[0:-2] + ')'

    logger.info(msg)
    if not dry_run:
        func(*args)


def strtobool(val):
    """Convert a string representation of truth to a boolean.

    True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
    are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if
    'val' is anything else.
    """
    val = val.lower()
    if val in ('y', 'yes', 't', 'true', 'on', '1'):
        return True
    elif val in ('n', 'no', 'f', 'false', 'off', '0'):
        return False
    else:
        raise ValueError("invalid truth value %r" % (val,))
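
# Illustrative usage (anything outside the two sets raises ValueError):
#   >>> strtobool('YES'), strtobool('off')
#   (True, False)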


def byte_compile(py_files, optimize=0, force=False, prefix=None,
                 base_dir=None, verbose=0, dry_run=False, direct=None):
    """Byte-compile a collection of Python source files to either .pyc
    or .pyo files in the same directory.

    'py_files' is a list of files to compile; any files that don't end in
    ".py" are silently skipped. 'optimize' must be one of the following:
      0 - don't optimize (generate .pyc)
      1 - normal optimization (like "python -O")
      2 - extra optimization (like "python -OO")
    If 'force' is true, all files are recompiled regardless of
    timestamps.

    The source filename encoded in each bytecode file defaults to the
    filenames listed in 'py_files'; you can modify these with 'prefix' and
    'base_dir'. 'prefix' is a string that will be stripped off of each
    source filename, and 'base_dir' is a directory name that will be
    prepended (after 'prefix' is stripped). You can supply either or both
    (or neither) of 'prefix' and 'base_dir', as you wish.

    If 'dry_run' is true, doesn't actually do anything that would
    affect the filesystem.

    Byte-compilation is either done directly in this interpreter process
    with the standard py_compile module, or indirectly by writing a
    temporary script and executing it. Normally, you should let
    'byte_compile()' figure out whether to use direct compilation or not
    (see the source for details). The 'direct' flag is used by the script
    generated in indirect mode; unless you know what you're doing, leave
    it set to None.
    """
    # nothing is done if sys.dont_write_bytecode is True
    # FIXME this should not raise an error
    if hasattr(sys, 'dont_write_bytecode') and sys.dont_write_bytecode:
        raise PackagingByteCompileError('byte-compiling is disabled.')

    # First, if the caller didn't force us into direct or indirect mode,
    # figure out which mode we should be in. We take a conservative
    # approach: choose direct mode *only* if the current interpreter is
    # in debug mode and optimize is 0. If we're not in debug mode (-O
    # or -OO), we don't know which level of optimization this
    # interpreter is running with, so we can't do direct
    # byte-compilation and be certain that it's the right thing. Thus,
    # always compile indirectly if the current interpreter is in either
    # optimize mode, or if either optimization level was requested by
    # the caller.
    if direct is None:
        direct = (__debug__ and optimize == 0)

    # "Indirect" byte-compilation: write a temporary script and then
    # run it with the appropriate flags.
    if not direct:
        from tempfile import mkstemp
        # XXX script_fd may leak, use something better than mkstemp
        script_fd, script_name = mkstemp(".py")
        logger.info("writing byte-compilation script '%s'", script_name)
        if not dry_run:
            if script_fd is not None:
                script = os.fdopen(script_fd, "w", encoding='utf-8')
            else:
                script = open(script_name, "w", encoding='utf-8')

            with script:
                script.write("""\
from packaging.util import byte_compile
files = [
""")

                # XXX would be nice to write absolute filenames, just for
                # safety's sake (script should be more robust in the face of
                # chdir'ing before running it). But this requires abspath'ing
                # 'prefix' as well, and that breaks the hack in build_lib's
                # 'byte_compile()' method that carefully tacks on a trailing
                # slash (os.sep really) to make sure the prefix here is "just
                # right". This whole prefix business is rather delicate -- the
                # problem is that it's really a directory, but I'm treating it
                # as a dumb string, so trailing slashes and so forth matter.

                #py_files = map(os.path.abspath, py_files)
                #if prefix:
                #    prefix = os.path.abspath(prefix)

                script.write(",\n".join(map(repr, py_files)) + "]\n")
                script.write("""
byte_compile(files, optimize=%r, force=%r,
             prefix=%r, base_dir=%r,
             verbose=%r, dry_run=False,
             direct=True)
""" % (optimize, force, prefix, base_dir, verbose))

        cmd = [sys.executable, script_name]
        if optimize == 1:
            cmd.insert(1, "-O")
        elif optimize == 2:
            cmd.insert(1, "-OO")

        env = os.environ.copy()
        env['PYTHONPATH'] = os.path.pathsep.join(sys.path)
        try:
            spawn(cmd, env=env)
        finally:
            execute(os.remove, (script_name,), "removing %s" % script_name,
                    dry_run=dry_run)

    # "Direct" byte-compilation: use the py_compile module to compile
    # right here, right now. Note that the script generated in indirect
    # mode simply calls 'byte_compile()' in direct mode, a weird sort of
    # cross-process recursion. Hey, it works!
    else:
        from py_compile import compile

        for file in py_files:
            if file[-3:] != ".py":
                # This lets us be lazy and not filter filenames in
                # the "install_lib" command.
                continue

            # Terminology from the py_compile module:
            #   cfile - byte-compiled file
            #   dfile - purported source filename (same as 'file' by default)
            cfile = file + (__debug__ and "c" or "o")
            dfile = file
            if prefix:
                if file[:len(prefix)] != prefix:
                    raise ValueError("invalid prefix: filename %r doesn't "
                                     "start with %r" % (file, prefix))
                dfile = dfile[len(prefix):]
            if base_dir:
                dfile = os.path.join(base_dir, dfile)

            cfile_base = os.path.basename(cfile)
            if direct:
                if force or newer(file, cfile):
                    logger.info("byte-compiling %s to %s", file, cfile_base)
                    if not dry_run:
                        compile(file, cfile, dfile)
                else:
                    logger.debug("skipping byte-compilation of %s to %s",
                                 file, cfile_base)


def rfc822_escape(header):
    """Return a form of *header* suitable for inclusion in an RFC 822 header.

    This function ensures there are 8 spaces after each newline.
    """
    lines = header.split('\n')
    sep = '\n' + 8 * ' '
    return sep.join(lines)
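
# Illustrative usage:
#   >>> rfc822_escape('Summary line\nSecond line')
#   'Summary line\n        Second line'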

_RE_VERSION = re.compile(r'(\d+\.\d+(\.\d+)*)')
_MAC_OS_X_LD_VERSION = re.compile(r'^@\(#\)PROGRAM:ld '
                                  r'PROJECT:ld64-((\d+)(\.\d+)*)')


def _find_ld_version():
    """Find the ld version. The version scheme differs under Mac OS X."""
    if sys.platform == 'darwin':
        return _find_exe_version('ld -v', _MAC_OS_X_LD_VERSION)
    else:
        return _find_exe_version('ld -v')


def _find_exe_version(cmd, pattern=_RE_VERSION):
    """Find the version of an executable by running `cmd` in the shell.

    `pattern` is a compiled regular expression. If not provided, defaults
    to _RE_VERSION. If the command is not found, or the output does not
    match the pattern, returns None.
    """
    from subprocess import Popen, PIPE
    executable = cmd.split()[0]
    if find_executable(executable) is None:
        return None
    pipe = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)
    try:
        stdout, stderr = pipe.communicate()
    finally:
        pipe.stdout.close()
        pipe.stderr.close()
    # communicate() returns bytes; decode so the output can be matched
    # against the str pattern below
    stdout = stdout.decode('utf-8', 'replace')
    stderr = stderr.decode('utf-8', 'replace')
    # some commands like ld under MacOS X, will give the
    # output in the stderr, rather than stdout.
    if stdout != '':
        out_string = stdout
    else:
        out_string = stderr

    result = pattern.search(out_string)
    if result is None:
        return None
    return result.group(1)


def get_compiler_versions():
    """Return a tuple providing the versions of gcc, ld and dllwrap.

    For each command, if a command is not found, None is returned.
    Otherwise a string with the version is returned.
    """
    gcc = _find_exe_version('gcc -dumpversion')
    ld = _find_ld_version()
    dllwrap = _find_exe_version('dllwrap --version')
    return gcc, ld, dllwrap


def newer_group(sources, target, missing='error'):
    """Return true if 'target' is out-of-date with respect to any file
    listed in 'sources'.

    In other words, if 'target' exists and is newer
    than every file in 'sources', return false; otherwise return true.
    'missing' controls what we do when a source file is missing; the
    default ("error") is to blow up with an OSError from inside 'stat()';
    if it is "ignore", we silently drop any missing source files; if it is
    "newer", any missing source files make us assume that 'target' is
    out-of-date (this is handy in "dry-run" mode: it'll make you pretend to
    carry out commands that wouldn't work because inputs are missing, but
    that doesn't matter because you're not actually going to run the
    commands).
    """
    # If the target doesn't even exist, then it's definitely out-of-date.
    if not os.path.exists(target):
        return True

    # Otherwise we have to find out the hard way: if *any* source file
    # is more recent than 'target', then 'target' is out-of-date and
    # we can immediately return true. If we fall through to the end
    # of the loop, then 'target' is up-to-date and we return false.
    target_mtime = os.stat(target).st_mtime

    for source in sources:
        if not os.path.exists(source):
            if missing == 'error':     # blow up when we stat() the file
                pass
            elif missing == 'ignore':  # missing source dropped from
                continue               # target's dependency list
            elif missing == 'newer':   # missing source means target is
                return True            # out-of-date

        if os.stat(source).st_mtime > target_mtime:
            return True

    return False
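
# Illustrative usage ('spam.c', 'spam.h' and 'spam.o' are made-up paths):
#   >>> newer_group(['spam.c', 'spam.h'], 'spam.o', missing='newer')
#   False   # spam.o exists and is newer than every listed source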


def write_file(filename, contents):
    """Create *filename* and write *contents* to it.

    *contents* is a sequence of strings without line terminators.
    """
    with open(filename, "w") as f:
        for line in contents:
            f.write(line + "\n")


def _is_package(path):
    return os.path.isdir(path) and os.path.isfile(
        os.path.join(path, '__init__.py'))


# Code taken from the pip project
def _is_archive_file(name):
    archives = ('.zip', '.tar.gz', '.tar.bz2', '.tgz', '.tar')
    ext = splitext(name)[1].lower()
    return ext in archives


def _under(path, root):
    path = path.split(os.sep)
    root = root.split(os.sep)
    if len(root) > len(path):
        return False
    for pos, part in enumerate(root):
        if path[pos] != part:
            return False
    return True


def _package_name(root_path, path):
    # Return a dotted package name, given a subpath
    if not _under(path, root_path):
        raise ValueError('"%s" is not a subpath of "%s"' % (path, root_path))
    return path[len(root_path) + 1:].replace(os.sep, '.')


def find_packages(paths=(os.curdir,), exclude=()):
    """Return a list of all Python packages found recursively within
    directories 'paths'.

    'paths' should be supplied as a sequence of "cross-platform"
    (i.e. URL-style) paths; they will be converted to the appropriate local
    path syntax.

    'exclude' is a sequence of package names to exclude; '*' can be used as
    a wildcard in the names, such that 'foo.*' will exclude all subpackages
    of 'foo' (but not 'foo' itself).
    """
    packages = []
    discarded = []

    def _discarded(path):
        for discard in discarded:
            if _under(path, discard):
                return True
        return False

    for path in paths:
        path = convert_path(path)
        for root, dirs, files in os.walk(path):
            for dir_ in dirs:
                fullpath = os.path.join(root, dir_)
                if _discarded(fullpath):
                    continue
                # we work only with Python packages
                if not _is_package(fullpath):
                    discarded.append(fullpath)
                    continue
                # see if it's excluded
                excluded = False
                package_name = _package_name(path, fullpath)
                for pattern in exclude:
                    if fnmatchcase(package_name, pattern):
                        excluded = True
                        break
                if excluded:
                    continue

                # adding it to the list
                packages.append(package_name)
    return packages
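
# Illustrative usage, assuming a source tree containing spam/__init__.py and
# spam/tests/__init__.py:
#   >>> find_packages(exclude=['*.tests'])
#   ['spam']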


def resolve_name(name):
    """Resolve a name like ``module.object`` to an object and return it.

    Raise ImportError if the module or name is not found.
    """
    parts = name.split('.')
    cursor = len(parts)
    module_name = parts[:cursor]

    while cursor > 0:
        try:
            ret = __import__('.'.join(module_name))
            break
        except ImportError:
            if cursor == 0:
                raise
            cursor -= 1
            module_name = parts[:cursor]
            ret = ''

    for part in parts[1:]:
        try:
            ret = getattr(ret, part)
        except AttributeError as exc:
            raise ImportError(exc)

    return ret
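
# Illustrative usage:
#   >>> resolve_name('os.path.join') is os.path.join
#   True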


def splitext(path):
    """Like os.path.splitext, but take off .tar too"""
    base, ext = posixpath.splitext(path)
    if base.lower().endswith('.tar'):
        ext = base[-4:] + ext
        base = base[:-4]
    return base, ext
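
# Illustrative usage:
#   >>> splitext('dist/spam-1.0.tar.gz')
#   ('dist/spam-1.0', '.tar.gz')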


def unzip_file(filename, location, flatten=True):
    """Unzip the file *filename* into the *location* directory."""
    if not os.path.exists(location):
        os.makedirs(location)
    with open(filename, 'rb') as zipfp:
        zip = zipfile.ZipFile(zipfp)
        leading = has_leading_dir(zip.namelist()) and flatten
        for name in zip.namelist():
            data = zip.read(name)
            fn = name
            if leading:
                fn = split_leading_dir(name)[1]
            fn = os.path.join(location, fn)
            dir = os.path.dirname(fn)
            if not os.path.exists(dir):
                os.makedirs(dir)
            if fn.endswith('/') or fn.endswith('\\'):
                # A directory
                if not os.path.exists(fn):
                    os.makedirs(fn)
            else:
                with open(fn, 'wb') as fp:
                    fp.write(data)


def untar_file(filename, location):
    """Untar the file *filename* into the *location* directory."""
    if not os.path.exists(location):
        os.makedirs(location)
    if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
        mode = 'r:gz'
    elif (filename.lower().endswith('.bz2')
          or filename.lower().endswith('.tbz')):
        mode = 'r:bz2'
    elif filename.lower().endswith('.tar'):
        mode = 'r'
    else:
        mode = 'r:*'
    with tarfile.open(filename, mode) as tar:
        leading = has_leading_dir(member.name for member in tar.getmembers())
        for member in tar.getmembers():
            fn = member.name
            if leading:
                fn = split_leading_dir(fn)[1]
            path = os.path.join(location, fn)
            if member.isdir():
                if not os.path.exists(path):
                    os.makedirs(path)
            else:
                try:
                    fp = tar.extractfile(member)
                except (KeyError, AttributeError):
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    continue
                try:
                    if not os.path.exists(os.path.dirname(path)):
                        os.makedirs(os.path.dirname(path))
                    with open(path, 'wb') as destfp:
                        shutil.copyfileobj(fp, destfp)
                finally:
                    fp.close()


def has_leading_dir(paths):
    """Return true if all the paths have the same leading path name.

    In other words, check that everything is in one subdirectory in an
    archive.
    """
    common_prefix = None
    for path in paths:
        prefix, rest = split_leading_dir(path)
        if not prefix:
            return False
        elif common_prefix is None:
            common_prefix = prefix
        elif prefix != common_prefix:
            return False
    return True


def split_leading_dir(path):
    path = str(path)
    path = path.lstrip('/').lstrip('\\')
    if '/' in path and (('\\' in path and path.find('/') < path.find('\\'))
                        or '\\' not in path):
        return path.split('/', 1)
    elif '\\' in path:
        return path.split('\\', 1)
    else:
        return path, ''
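
# Illustrative usage:
#   >>> split_leading_dir('spam-1.0/setup.cfg')
#   ['spam-1.0', 'setup.cfg']
#   >>> has_leading_dir(['spam-1.0/setup.cfg', 'spam-1.0/spam/__init__.py'])
#   True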

if sys.platform == 'darwin':
    _cfg_target = None
    _cfg_target_split = None

def spawn(cmd, search_path=True, verbose=0, dry_run=False, env=None):
    """Run another program specified as a command list 'cmd' in a new process.

    'cmd' is just the argument list for the new process, i.e.
    cmd[0] is the program to run and cmd[1:] are the rest of its arguments.
    There is no way to run a program with a name different from that of its
    executable.

    If 'search_path' is true (the default), the system's executable
    search path will be used to find the program; otherwise, cmd[0]
    must be the exact path to the executable. If 'dry_run' is true,
    the command will not actually be run.

    If 'env' is given, it's an environment dictionary used for the execution
    environment.

    Raise PackagingExecError if running the program fails in any way; just
    return on success.
    """
    logger.debug('spawn: running %r', cmd)
    if dry_run:
        logger.debug('dry run, no process actually spawned')
        return
    if sys.platform == 'darwin':
        global _cfg_target, _cfg_target_split
        if _cfg_target is None:
            _cfg_target = sysconfig.get_config_var(
                'MACOSX_DEPLOYMENT_TARGET') or ''
            if _cfg_target:
                _cfg_target_split = [int(x) for x in _cfg_target.split('.')]
        if _cfg_target:
            # ensure that the deployment target of build process is not less
            # than that used when the interpreter was built. This ensures
            # extension modules are built with correct compatibility values
            env = env or os.environ
            cur_target = env.get('MACOSX_DEPLOYMENT_TARGET', _cfg_target)
            if _cfg_target_split > [int(x) for x in cur_target.split('.')]:
                my_msg = ('$MACOSX_DEPLOYMENT_TARGET mismatch: '
                          'now "%s" but "%s" during configure'
                          % (cur_target, _cfg_target))
                raise PackagingPlatformError(my_msg)
            env = dict(env, MACOSX_DEPLOYMENT_TARGET=cur_target)

    exit_status = subprocess.call(cmd, env=env)
    if exit_status != 0:
        msg = "command %r failed with exit status %d"
        raise PackagingExecError(msg % (cmd, exit_status))


def find_executable(executable, path=None):
    """Try to find 'executable' in the directories listed in 'path'.

    *path* is a string listing directories separated by 'os.pathsep' and
    defaults to os.environ['PATH']. Returns the complete filename or None
    if not found.
    """
    if path is None:
        path = os.environ['PATH']
    paths = path.split(os.pathsep)
    base, ext = os.path.splitext(executable)

    if (sys.platform == 'win32' or os.name == 'os2') and (ext != '.exe'):
        executable = executable + '.exe'

    if not os.path.isfile(executable):
        for p in paths:
            f = os.path.join(p, executable)
            if os.path.isfile(f):
                # the file exists, we have a shot at spawn working
                return f
        return None
    else:
        return executable


DEFAULT_REPOSITORY = 'http://pypi.python.org/pypi'
DEFAULT_REALM = 'pypi'
DEFAULT_PYPIRC = """\
[distutils]
index-servers =
    pypi

[pypi]
username:%s
password:%s
"""


def get_pypirc_path():
    """Return path to pypirc config file."""
    return os.path.join(os.path.expanduser('~'), '.pypirc')


def generate_pypirc(username, password):
    """Create a default .pypirc file."""
    rc = get_pypirc_path()
    with open(rc, 'w') as f:
        f.write(DEFAULT_PYPIRC % (username, password))
    try:
        os.chmod(rc, 0o600)
    except OSError:
        # should do something better here
        pass


def read_pypirc(repository=DEFAULT_REPOSITORY, realm=DEFAULT_REALM):
    """Read the .pypirc file."""
    rc = get_pypirc_path()
    if os.path.exists(rc):
        config = RawConfigParser()
        config.read(rc)
        sections = config.sections()
        if 'distutils' in sections:
            # let's get the list of servers
            index_servers = config.get('distutils', 'index-servers')
            _servers = [server.strip() for server in
                        index_servers.split('\n')
                        if server.strip() != '']
            if _servers == []:
                # nothing set, let's try to get the default pypi
                if 'pypi' in sections:
                    _servers = ['pypi']
                else:
                    # the file is not properly defined, returning
                    # an empty dict
                    return {}
            for server in _servers:
                current = {'server': server}
                current['username'] = config.get(server, 'username')

                # optional params
                for key, default in (('repository', DEFAULT_REPOSITORY),
                                     ('realm', DEFAULT_REALM),
                                     ('password', None)):
                    if config.has_option(server, key):
                        current[key] = config.get(server, key)
                    else:
                        current[key] = default
                if (current['server'] == repository or
                    current['repository'] == repository):
                    return current
        elif 'server-login' in sections:
            # old format
            server = 'server-login'
            if config.has_option(server, 'repository'):
                repository = config.get(server, 'repository')
            else:
                repository = DEFAULT_REPOSITORY

            return {'username': config.get(server, 'username'),
                    'password': config.get(server, 'password'),
                    'repository': repository,
                    'server': server,
                    'realm': DEFAULT_REALM}

    return {}

# utility functions for 2to3 support

def run_2to3(files, doctests_only=False, fixer_names=None,
             options=None, explicit=None):
    """Invoke 2to3 on a list of Python files.

    This is a wrapper around lib2to3's RefactoringTool that performs the
    conversions in-place, so the files should all come from the build area.
    """

    #if not files:
    #    return

    # Make this class local, to delay import of 2to3
    from lib2to3.refactor import get_fixers_from_package, RefactoringTool
    fixers = []
    fixers = get_fixers_from_package('lib2to3.fixes')

    if fixer_names:
        for fixername in fixer_names:
            fixers.extend(fixer for fixer in
                          get_fixers_from_package(fixername))
    r = RefactoringTool(fixers, options=options)
    r.refactor(files, write=True, doctests_only=doctests_only)


class Mixin2to3:
    """Wrapper class for commands that run 2to3.

    To configure 2to3, setup scripts may either change
    the class variables, or inherit from this class
    to override how 2to3 is invoked.
    """
    # provide list of fixers to run;
    # defaults to all from lib2to3.fixes
    fixer_names = None

    # options dictionary
    options = None

    # list of fixers to invoke even though they are marked as explicit
    explicit = None

    def run_2to3(self, files, doctests_only=False):
        """Issue a call to util.run_2to3."""
        return run_2to3(files, doctests_only, self.fixer_names,
                        self.options, self.explicit)

RICH_GLOB = re.compile(r'\{([^}]*)\}')
_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]')
_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$')


def iglob(path_glob):
    """Extended globbing function that supports ** and {opt1,opt2,opt3}."""
    if _CHECK_RECURSIVE_GLOB.search(path_glob):
        msg = """invalid glob %r: recursive glob "**" must be used alone"""
        raise ValueError(msg % path_glob)
    if _CHECK_MISMATCH_SET.search(path_glob):
        msg = """invalid glob %r: mismatching set marker '{' or '}'"""
        raise ValueError(msg % path_glob)
    return _iglob(path_glob)


def _iglob(path_glob):
    rich_path_glob = RICH_GLOB.split(path_glob, 1)
    if len(rich_path_glob) > 1:
        assert len(rich_path_glob) == 3, rich_path_glob
        prefix, set, suffix = rich_path_glob
        for item in set.split(','):
            for path in _iglob(''.join((prefix, item, suffix))):
                yield path
    else:
        if '**' not in path_glob:
            for item in std_iglob(path_glob):
                yield item
        else:
            prefix, radical = path_glob.split('**', 1)
            if prefix == '':
                prefix = '.'
            if radical == '':
                radical = '*'
            else:
                # we support both
                radical = radical.lstrip('/')
                radical = radical.lstrip('\\')
            for path, dir, files in os.walk(prefix):
                path = os.path.normpath(path)
                for file in _iglob(os.path.join(path, radical)):
                    yield file
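
# Illustrative usage (the patterns are made up for the example):
#   >>> list(iglob('docs/**/*.txt'))      # every .txt file under docs/, recursively
#   >>> list(iglob('README.{txt,rst}'))   # brace sets expand to alternatives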


def cfg_to_args(path='setup.cfg'):
    """Compatibility helper to use setup.cfg in setup.py.

    This function uses an existing setup.cfg to generate a dictionary of
    keywords that can be used by distutils.core.setup(**kwargs). It is used
    by generate_setup_py.

    *path* is the path to the setup.cfg file. If it doesn't exist,
    PackagingFileError is raised.
    """
    # We need to declare the following constants here so that it's easier to
    # generate the setup.py afterwards, using inspect.getsource.

    # XXX ** == needs testing
    D1_D2_SETUP_ARGS = {"name": ("metadata",),
                        "version": ("metadata",),
                        "author": ("metadata",),
                        "author_email": ("metadata",),
                        "maintainer": ("metadata",),
                        "maintainer_email": ("metadata",),
                        "url": ("metadata", "home_page"),
                        "description": ("metadata", "summary"),
                        "long_description": ("metadata", "description"),
                        "download-url": ("metadata",),
                        "classifiers": ("metadata", "classifier"),
                        "platforms": ("metadata", "platform"),  # **
                        "license": ("metadata",),
                        "requires": ("metadata", "requires_dist"),
                        "provides": ("metadata", "provides_dist"),  # **
                        "obsoletes": ("metadata", "obsoletes_dist"),  # **
                        "package_dir": ("files", "packages_root"),
                        "packages": ("files",),
                        "scripts": ("files",),
                        "py_modules": ("files", "modules"),  # **
                        }

    MULTI_FIELDS = ("classifiers",
                    "platforms",
                    "requires",
                    "provides",
                    "obsoletes",
                    "packages",
                    "scripts",
                    "py_modules")

    def has_get_option(config, section, option):
        if config.has_option(section, option):
            return config.get(section, option)
        elif config.has_option(section, option.replace('_', '-')):
            return config.get(section, option.replace('_', '-'))
        else:
            return False

    # The real code starts here
    config = RawConfigParser()
    if not os.path.exists(path):
        raise PackagingFileError("file '%s' does not exist" %
                                 os.path.abspath(path))
    config.read(path, encoding='utf-8')

    kwargs = {}
    for arg in D1_D2_SETUP_ARGS:
        if len(D1_D2_SETUP_ARGS[arg]) == 2:
            # The distutils field name is different from packaging's
            section, option = D1_D2_SETUP_ARGS[arg]

        else:
            # The distutils field name is the same as packaging's
            section = D1_D2_SETUP_ARGS[arg][0]
            option = arg

        in_cfg_value = has_get_option(config, section, option)
        if not in_cfg_value:
            # There is no such option in the setup.cfg
            if arg == 'long_description':
                filenames = has_get_option(config, section, 'description-file')
                if filenames:
                    filenames = split_multiline(filenames)
                    in_cfg_value = []
                    for filename in filenames:
                        with open(filename) as fp:
                            in_cfg_value.append(fp.read())
                    in_cfg_value = '\n\n'.join(in_cfg_value)
            else:
                continue

        if arg == 'package_dir' and in_cfg_value:
            in_cfg_value = {'': in_cfg_value}

        if arg in MULTI_FIELDS:
            # support multiline options
            in_cfg_value = split_multiline(in_cfg_value)

        kwargs[arg] = in_cfg_value

    return kwargs
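
# Illustrative setup.cfg fragment (contents invented for the example) and the
# keywords cfg_to_args() would derive from it:
#
#   [metadata]
#   name = spam
#   version = 1.0
#   summary = An example project
#
#   [files]
#   packages = spam
#              spam.tests
#
#   -> {'name': 'spam', 'version': '1.0', 'description': 'An example project',
#       'packages': ['spam', 'spam.tests']}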


_SETUP_TMPL = """\
# This script was automatically generated by packaging
import os
from distutils.core import setup
from ConfigParser import RawConfigParser

%(func)s

setup(**cfg_to_args())
"""


def generate_setup_py():
    """Generate a distutils compatible setup.py using an existing setup.cfg.

    Raises a PackagingFileError when a setup.py already exists.
    """
    if os.path.exists("setup.py"):
        raise PackagingFileError("a setup.py file already exists")

    with open("setup.py", "w", encoding='utf-8') as fp:
        fp.write(_SETUP_TMPL % {'func': getsource(cfg_to_args)})


# Taken from the pip project
# https://github.com/pypa/pip/blob/master/pip/util.py
def ask(message, options):
    """Prompt the user with *message*; *options* contains allowed responses."""
    while True:
        response = input(message)
        response = response.strip().lower()
        if response not in options:
            print('invalid response:', repr(response))
            print('choose one of', ', '.join(repr(o) for o in options))
        else:
            return response


def _parse_record_file(record_file):
    distinfo, extra_metadata, installed = ({}, [], [])
    with open(record_file, 'r') as rfile:
        for path in rfile:
            path = path.strip()
            if path.endswith('egg-info') and os.path.isfile(path):
                distinfo_dir = path.replace('egg-info', 'dist-info')
                metadata = path
                egginfo = path
            elif path.endswith('egg-info') and os.path.isdir(path):
                distinfo_dir = path.replace('egg-info', 'dist-info')
                egginfo = path
                for metadata_file in os.listdir(path):
                    metadata_fpath = os.path.join(path, metadata_file)
                    if metadata_file == 'PKG-INFO':
                        metadata = metadata_fpath
                    else:
                        extra_metadata.append(metadata_fpath)
            elif 'egg-info' in path and os.path.isfile(path):
                # skip extra metadata files
                continue
            else:
                installed.append(path)

    distinfo['egginfo'] = egginfo
    distinfo['metadata'] = metadata
    distinfo['distinfo_dir'] = distinfo_dir
    distinfo['installer_path'] = os.path.join(distinfo_dir, 'INSTALLER')
    distinfo['metadata_path'] = os.path.join(distinfo_dir, 'METADATA')
    distinfo['record_path'] = os.path.join(distinfo_dir, 'RECORD')
    distinfo['requested_path'] = os.path.join(distinfo_dir, 'REQUESTED')
    installed.extend([distinfo['installer_path'], distinfo['metadata_path']])
    distinfo['installed'] = installed
    distinfo['extra_metadata'] = extra_metadata
    return distinfo


def _write_record_file(record_path, installed_files):
    with open(record_path, 'w', encoding='utf-8') as f:
        writer = csv.writer(f, delimiter=',', lineterminator=os.linesep,
                            quotechar='"')

        for fpath in installed_files:
            if fpath.endswith('.pyc') or fpath.endswith('.pyo'):
                # do not put size and md5 hash, as in PEP-376
                writer.writerow((fpath, '', ''))
            else:
                hash = hashlib.md5()
                with open(fpath, 'rb') as fp:
                    hash.update(fp.read())
                md5sum = hash.hexdigest()
                size = os.path.getsize(fpath)
                writer.writerow((fpath, md5sum, size))

        # add the RECORD file itself
        writer.writerow((record_path, '', ''))
    return record_path


def egginfo_to_distinfo(record_file, installer=_DEFAULT_INSTALLER,
                        requested=False, remove_egginfo=False):
    """Create files and directories required for PEP 376

    :param record_file: path to RECORD file as produced by setup.py --record
    :param installer: installer name
    :param requested: True if not installed as a dependency
    :param remove_egginfo: delete egginfo dir?
    """
    distinfo = _parse_record_file(record_file)
    distinfo_dir = distinfo['distinfo_dir']
    if os.path.isdir(distinfo_dir) and not os.path.islink(distinfo_dir):
        shutil.rmtree(distinfo_dir)
    elif os.path.exists(distinfo_dir):
        os.unlink(distinfo_dir)

    os.makedirs(distinfo_dir)

    # copy setuptools extra metadata files
    if distinfo['extra_metadata']:
        for path in distinfo['extra_metadata']:
            shutil.copy2(path, distinfo_dir)
            new_path = path.replace('egg-info', 'dist-info')
            distinfo['installed'].append(new_path)

    metadata_path = distinfo['metadata_path']
    logger.info('creating %s', metadata_path)
    shutil.copy2(distinfo['metadata'], metadata_path)

    installer_path = distinfo['installer_path']
    logger.info('creating %s', installer_path)
    with open(installer_path, 'w') as f:
        f.write(installer)

    if requested:
        requested_path = distinfo['requested_path']
        logger.info('creating %s', requested_path)
        open(requested_path, 'wb').close()
        distinfo['installed'].append(requested_path)

    record_path = distinfo['record_path']
    logger.info('creating %s', record_path)
    _write_record_file(record_path, distinfo['installed'])

    if remove_egginfo:
        egginfo = distinfo['egginfo']
        logger.info('removing %s', egginfo)
        if os.path.isfile(egginfo):
            os.remove(egginfo)
        else:
            shutil.rmtree(egginfo)


def _has_egg_info(srcdir):
    if os.path.isdir(srcdir):
        for item in os.listdir(srcdir):
            full_path = os.path.join(srcdir, item)
            if item.endswith('.egg-info') and os.path.isdir(full_path):
                logger.debug("Found egg-info directory.")
                return True
    logger.debug("No egg-info directory found.")
    return False
| 1269 | |
| 1270 | |
| 1271 | def _has_setuptools_text(setup_py): |
| 1272 | return _has_text(setup_py, 'setuptools') |
| 1273 | |
| 1274 | |
| 1275 | def _has_distutils_text(setup_py): |
| 1276 | return _has_text(setup_py, 'distutils') |
| 1277 | |
| 1278 | |
| 1279 | def _has_text(setup_py, installer): |
| 1280 | installer_pattern = re.compile('import {0}|from {0}'.format(installer)) |
| 1281 | with open(setup_py, 'r', encoding='utf-8') as setup: |
| 1282 | for line in setup: |
| 1283 | if re.search(installer_pattern, line): |
Tarek Ziade | b1b6e13 | 2011-05-30 12:07:49 +0200 | [diff] [blame] | 1284 | logger.debug("Found %s text in setup.py.", installer) |
Tarek Ziade | 1231a4e | 2011-05-19 13:07:25 +0200 | [diff] [blame] | 1285 | return True |
Tarek Ziade | b1b6e13 | 2011-05-30 12:07:49 +0200 | [diff] [blame] | 1286 | logger.debug("No %s text found in setup.py.", installer) |
Tarek Ziade | 1231a4e | 2011-05-19 13:07:25 +0200 | [diff] [blame] | 1287 | return False |
| 1288 | |
| 1289 | |
| 1290 | def _has_required_metadata(setup_cfg): |
| 1291 | config = RawConfigParser() |
| 1292 | config.read([setup_cfg], encoding='utf8') |
| 1293 | return (config.has_section('metadata') and |
| 1294 | 'name' in config.options('metadata') and |
| 1295 | 'version' in config.options('metadata')) |
| 1296 | |
| 1297 | |
| 1298 | def _has_pkg_info(srcdir): |
| 1299 | pkg_info = os.path.join(srcdir, 'PKG-INFO') |
| 1300 | has_pkg_info = os.path.isfile(pkg_info) |
| 1301 | if has_pkg_info: |
Tarek Ziade | b1b6e13 | 2011-05-30 12:07:49 +0200 | [diff] [blame] | 1302 | logger.debug("PKG-INFO file found.") |
| 1303 | else: |
| 1304 | logger.debug("No PKG-INFO file found.") |
Tarek Ziade | 1231a4e | 2011-05-19 13:07:25 +0200 | [diff] [blame] | 1305 | return has_pkg_info |
| 1306 | |
| 1307 | |
| 1308 | def _has_setup_py(srcdir): |
| 1309 | setup_py = os.path.join(srcdir, 'setup.py') |
| 1310 | if os.path.isfile(setup_py): |
Tarek Ziade | b1b6e13 | 2011-05-30 12:07:49 +0200 | [diff] [blame] | 1311 | logger.debug('setup.py file found.') |
Tarek Ziade | 1231a4e | 2011-05-19 13:07:25 +0200 | [diff] [blame] | 1312 | return True |
| 1313 | return False |
| 1314 | |
| 1315 | |
| 1316 | def _has_setup_cfg(srcdir): |
| 1317 | setup_cfg = os.path.join(srcdir, 'setup.cfg') |
| 1318 | if os.path.isfile(setup_cfg): |
Tarek Ziade | b1b6e13 | 2011-05-30 12:07:49 +0200 | [diff] [blame] | 1319 | logger.debug('setup.cfg file found.') |
Tarek Ziade | 1231a4e | 2011-05-19 13:07:25 +0200 | [diff] [blame] | 1320 | return True |
Tarek Ziade | b1b6e13 | 2011-05-30 12:07:49 +0200 | [diff] [blame] | 1321 | logger.debug("No setup.cfg file found.") |
Tarek Ziade | 1231a4e | 2011-05-19 13:07:25 +0200 | [diff] [blame] | 1322 | return False |
| 1323 | |
| 1324 | |
| 1325 | def is_setuptools(path): |
| 1326 | """Check if the project is based on setuptools. |
| 1327 | |
| 1328 | :param path: path to source directory containing a setup.py script. |
| 1329 | |
| 1330 | Return True if the project requires setuptools to install, else False. |
| 1331 | """ |
| 1332 | srcdir = os.path.abspath(path) |
| 1333 | setup_py = os.path.join(srcdir, 'setup.py') |
| 1334 | |
| 1335 | return _has_setup_py(srcdir) and (_has_egg_info(srcdir) or |
| 1336 | _has_setuptools_text(setup_py)) |
| 1337 | |
| 1338 | |
| 1339 | def is_distutils(path): |
| 1340 | """Check if the project is based on distutils. |
| 1341 | |
| 1342 | :param path: path to source directory containing a setup.py script. |
| 1343 | |
| 1344 | Return True if the project requires distutils to install, else False. |
| 1345 | """ |
| 1346 | srcdir = os.path.abspath(path) |
| 1347 | setup_py = os.path.join(srcdir, 'setup.py') |
| 1348 | |
| 1349 | return _has_setup_py(srcdir) and (_has_pkg_info(srcdir) or |
| 1350 | _has_distutils_text(setup_py)) |
| 1351 | |
| 1352 | |
| 1353 | def is_packaging(path): |
    """Check if the project is based on packaging.
| 1355 | |
| 1356 | :param path: path to source directory containing a setup.cfg file. |
| 1357 | |
| 1358 | Return True if the project has a valid setup.cfg, else False. |
| 1359 | """ |
| 1360 | srcdir = os.path.abspath(path) |
| 1361 | setup_cfg = os.path.join(srcdir, 'setup.cfg') |
| 1362 | |
| 1363 | return _has_setup_cfg(srcdir) and _has_required_metadata(setup_cfg) |
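
# Illustrative sketch (not part of the original module): the three is_* helpers
# are independent, so a project shipping both a valid setup.cfg and a
# setuptools-based setup.py matches more than one of them; get_install_method()
# below resolves the ambiguity by preferring packaging.  The path is
# hypothetical.
#
#   >>> is_packaging('/tmp/spam-1.0')     # doctest: +SKIP
#   True
#   >>> is_setuptools('/tmp/spam-1.0')    # doctest: +SKIP
#   True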
| 1364 | |
| 1365 | |
| 1366 | def get_install_method(path): |
    """Check if the project is based on packaging, setuptools, or distutils.
| 1368 | |
| 1369 | :param path: path to source directory containing a setup.cfg file, |
| 1370 | or setup.py. |
| 1371 | |
| 1372 | Returns a string representing the best install method to use. |
| 1373 | """ |
| 1374 | if is_packaging(path): |
| 1375 | return "packaging" |
| 1376 | elif is_setuptools(path): |
| 1377 | return "setuptools" |
| 1378 | elif is_distutils(path): |
| 1379 | return "distutils" |
| 1380 | else: |
| 1381 | raise InstallationException('Cannot detect install method') |
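
# Illustrative sketch (not part of the original module): typical use is to pick
# an install backend for an unpacked source distribution.  The path is
# hypothetical.
#
#   >>> get_install_method('/tmp/spam-1.0')    # doctest: +SKIP
#   'packaging'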
| 1382 | |
| 1383 | |
| 1384 | # XXX to be replaced by shutil.copytree |
| 1385 | def copy_tree(src, dst, preserve_mode=True, preserve_times=True, |
| 1386 | preserve_symlinks=False, update=False, verbose=True, |
| 1387 | dry_run=False): |
| 1388 | from distutils.file_util import copy_file |
| 1389 | |
| 1390 | if not dry_run and not os.path.isdir(src): |
| 1391 | raise PackagingFileError( |
| 1392 | "cannot copy tree '%s': not a directory" % src) |
| 1393 | try: |
| 1394 | names = os.listdir(src) |
    except OSError as e:
        # in Python 3 the error message is an attribute, not a subscript
        if dry_run:
            names = []
        else:
            raise PackagingFileError(
                "error listing files in '%s': %s" % (src, e.strerror))
| 1402 | |
| 1403 | if not dry_run: |
| 1404 | _mkpath(dst, verbose=verbose) |
| 1405 | |
| 1406 | outputs = [] |
| 1407 | |
| 1408 | for n in names: |
| 1409 | src_name = os.path.join(src, n) |
| 1410 | dst_name = os.path.join(dst, n) |
| 1411 | |
| 1412 | if preserve_symlinks and os.path.islink(src_name): |
| 1413 | link_dest = os.readlink(src_name) |
| 1414 | if verbose >= 1: |
| 1415 | logger.info("linking %s -> %s", dst_name, link_dest) |
| 1416 | if not dry_run: |
| 1417 | os.symlink(link_dest, dst_name) |
| 1418 | outputs.append(dst_name) |
| 1419 | |
| 1420 | elif os.path.isdir(src_name): |
| 1421 | outputs.extend( |
| 1422 | copy_tree(src_name, dst_name, preserve_mode, |
| 1423 | preserve_times, preserve_symlinks, update, |
| 1424 | verbose=verbose, dry_run=dry_run)) |
| 1425 | else: |
| 1426 | copy_file(src_name, dst_name, preserve_mode, |
| 1427 | preserve_times, update, verbose=verbose, |
| 1428 | dry_run=dry_run) |
| 1429 | outputs.append(dst_name) |
| 1430 | |
| 1431 | return outputs |
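
# Illustrative sketch (not part of the original module): copy_tree() returns
# the files it copied (or, with dry_run=True, the files it would copy), which
# callers can use to record installed files.  The paths are hypothetical.
#
#   >>> copy_tree('docs', 'build/docs', dry_run=True)    # doctest: +SKIP
#   ['build/docs/index.txt', 'build/docs/api.txt']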
| 1432 | |
# cache used by _mkpath() -- in addition to making redundant calls cheaper,
# it eliminates redundant "creating /foo/bar/baz" messages in dry-run mode
| 1435 | _path_created = set() |
| 1436 | |
| 1437 | |
# os.makedirs is not used here because this helper must report exactly which
# directories it created (and remember them in _path_created); it must also
# silently succeed when the directory already exists.
| 1441 | def _mkpath(name, mode=0o777, verbose=True, dry_run=False): |
| 1442 | # Detect a common bug -- name is None |
| 1443 | if not isinstance(name, str): |
| 1444 | raise PackagingInternalError( |
| 1445 | "mkpath: 'name' must be a string (got %r)" % (name,)) |
| 1446 | |
| 1447 | # XXX what's the better way to handle verbosity? print as we create |
| 1448 | # each directory in the path (the current behaviour), or only announce |
| 1449 | # the creation of the whole path? (quite easy to do the latter since |
| 1450 | # we're not using a recursive algorithm) |
| 1451 | |
| 1452 | name = os.path.normpath(name) |
| 1453 | created_dirs = [] |
| 1454 | if os.path.isdir(name) or name == '': |
| 1455 | return created_dirs |
| 1456 | if os.path.abspath(name) in _path_created: |
| 1457 | return created_dirs |
| 1458 | |
| 1459 | head, tail = os.path.split(name) |
| 1460 | tails = [tail] # stack of lone dirs to create |
| 1461 | |
| 1462 | while head and tail and not os.path.isdir(head): |
| 1463 | head, tail = os.path.split(head) |
| 1464 | tails.insert(0, tail) # push next higher dir onto stack |
| 1465 | |
| 1466 | # now 'head' contains the deepest directory that already exists |
| 1467 | # (that is, the child of 'head' in 'name' is the highest directory |
| 1468 | # that does *not* exist) |
| 1469 | for d in tails: |
| 1470 | head = os.path.join(head, d) |
| 1471 | abs_head = os.path.abspath(head) |
| 1472 | |
| 1473 | if abs_head in _path_created: |
| 1474 | continue |
| 1475 | |
| 1476 | if verbose >= 1: |
| 1477 | logger.info("creating %s", head) |
| 1478 | |
| 1479 | if not dry_run: |
| 1480 | try: |
| 1481 | os.mkdir(head, mode) |
| 1482 | except OSError as exc: |
| 1483 | if not (exc.errno == errno.EEXIST and os.path.isdir(head)): |
| 1484 | raise PackagingFileError( |
| 1485 | "could not create '%s': %s" % (head, exc.args[-1])) |
| 1486 | created_dirs.append(head) |
| 1487 | |
| 1488 | _path_created.add(abs_head) |
| 1489 | return created_dirs |
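
# Illustrative sketch (not part of the original module): _mkpath() returns only
# the directories it had to create and remembers them in _path_created, so a
# second call for the same path is a cheap no-op.  The path is hypothetical.
#
#   >>> _mkpath('build/lib/spam')    # doctest: +SKIP
#   ['build', 'build/lib', 'build/lib/spam']
#   >>> _mkpath('build/lib/spam')    # doctest: +SKIP
#   []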
Éric Araujo | ce5fe83 | 2011-07-08 16:27:12 +0200 | [diff] [blame^] | 1490 | |
| 1491 | |
| 1492 | def encode_multipart(fields, files, boundary=None): |
| 1493 | """Prepare a multipart HTTP request. |
| 1494 | |
| 1495 | *fields* is a sequence of (name: str, value: str) elements for regular |
| 1496 | form fields, *files* is a sequence of (name: str, filename: str, value: |
| 1497 | bytes) elements for data to be uploaded as files. |
| 1498 | |
    Returns (content_type: bytes, body: bytes) ready for
    http.client.HTTPConnection.
| 1500 | """ |
| 1501 | # Taken from |
| 1502 | # http://code.activestate.com/recipes/146306-http-client-to-post-using-multipartform-data/ |
| 1503 | |
| 1504 | if boundary is None: |
| 1505 | boundary = b'--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' |
| 1506 | elif not isinstance(boundary, bytes): |
| 1507 | raise TypeError('boundary must be bytes, not %r' % type(boundary)) |
| 1508 | |
    parts = []
    for key, values in fields:
        # handle multiple entries for the same name
        if not isinstance(values, (tuple, list)):
            values = [values]

        for value in values:
            parts.extend((
                b'--' + boundary,
                ('Content-Disposition: form-data; name="%s"' %
                 key).encode('utf-8'),
                b'',
                value.encode('utf-8')))

    for key, filename, value in files:
        parts.extend((
            b'--' + boundary,
            ('Content-Disposition: form-data; name="%s"; filename="%s"' %
             (key, filename)).encode('utf-8'),
            b'',
            value))

    parts.append(b'--' + boundary + b'--')
    parts.append(b'')

    body = b'\r\n'.join(parts)
| 1535 | content_type = b'multipart/form-data; boundary=' + boundary |
| 1536 | return content_type, body |
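
# Illustrative sketch (not part of the original module): the returned pair can
# be passed straight to http.client.  The host, URL and field values below are
# hypothetical.
#
#   >>> import http.client
#   >>> content_type, body = encode_multipart(
#   ...     [(':action', 'submit')],
#   ...     [('content', 'spam-1.0.tar.gz', b'...')])
#   >>> conn = http.client.HTTPConnection('pypi.example.org')
#   >>> conn.request('POST', '/', body,
#   ...              {'Content-Type': content_type.decode('ascii')})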