import os
import sys
import tempfile
from pprint import pprint

###

import Arguments
import Jobs
import Phases
import Tools
import Types
import Util

# FIXME: Clean up naming of options and arguments. Decide whether to
# rename Option and be consistent about use of Option/Arg.

####

class MissingArgumentError(ValueError):
    """MissingArgumentError - An option required an argument but none
    was given."""

###

class Driver(object):
    def __init__(self):
        self.parser = Arguments.createOptionParser()

    def run(self, args):
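        """run - Process the given argument strings: handle the special
        -ccc- testing options, parse the remaining arguments, construct
        the pipeline of compilation phases, bind the phases to tool
        jobs, and execute (or, with -###, print) those jobs."""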
        # FIXME: Things to support from environment: GCC_EXEC_PREFIX,
        # COMPILER_PATH, LIBRARY_PATH, LPATH, CC_PRINT_OPTIONS,
        # QA_OVERRIDE_GCC3_OPTIONS, ...?

        # FIXME: -V and -b processing

        # Handle some special -ccc- options used for testing which are
        # only allowed at the beginning of the command line.
        cccPrintOptions = False
        cccPrintPhases = False
        cccUseDriverDriver = True
        while args and args[0].startswith('-ccc-'):
            opt,args = args[0][5:],args[1:]

            if opt == 'print-options':
                cccPrintOptions = True
            elif opt == 'print-phases':
                cccPrintPhases = True
            elif opt == 'no-driver-driver':
                # FIXME: Remove this once we have some way of being a
                # cross compiler driver (cross driver compiler? compiler
                # cross driver? etc.).
                cccUseDriverDriver = False
            else:
                raise ValueError,"Invalid ccc option: %r" % opt

        options = self.parser.chunkArgs(args)

        # FIXME: Ho hum, I have just realized -Xarch_ is broken. We really
        # need to reparse the Arguments after they have been expanded by
        # -Xarch. How is this going to work?
        #
        # Scratch that, we aren't going to do that; it really disrupts the
        # organization, doesn't consistently work with gcc-dd, and is
        # confusing. Instead we are going to enforce that -Xarch_ is only
        # used with options which do not alter the driver behavior. Let's
        # hope this is ok, because the current architecture is a little
        # tied to it.

        if cccPrintOptions:
            self.printOptions(args, options)
            sys.exit(0)

        self.handleImmediateOptions(args, options)

        if cccUseDriverDriver:
            phases = self.buildPipeline(options, args)
        else:
            phases = self.buildNormalPipeline(options, args)

        if cccPrintPhases:
            self.printPhases(args, phases)
            sys.exit(0)

        if 0:
            print Util.pprint(phases)

        jobs = self.bindPhases(phases, options, args)

        # FIXME: We should provide some basic sanity checking of the
        # pipeline as a "verification" sort of stage. For example, the
        # pipeline should never end up writing to an output file in two
        # places (I think). The pipeline should also never end up writing
        # to an output file that is an input.
        #
        # This is intended to just be a "verify" step, not a functionality
        # step. It should catch things like the driver driver not
        # preventing -save-temps, but it shouldn't change behavior (so we
        # can turn it off in Release-Asserts builds).

        # Print in -### syntax.
        hasHashHashHash = None
        for oi in options:
            if oi.opt and oi.opt.name == '-###':
                hasHashHashHash = oi

        if hasHashHashHash:
            self.claim(hasHashHashHash)
            for j in jobs.iterjobs():
                if isinstance(j, Jobs.Command):
                    print '"%s"' % '" "'.join(j.render(args))
                elif isinstance(j, Jobs.PipedJob):
                    for c in j.commands:
                        print '"%s" %c' % ('" "'.join(c.render(args)),
                                           "| "[c is j.commands[-1]])
                elif not isinstance(j, Jobs.JobList):
                    raise ValueError,'Encountered unknown job.'
            sys.exit(0)

        for j in jobs.iterjobs():
            if isinstance(j, Jobs.Command):
                cmd_args = j.render(args)
                res = os.spawnvp(os.P_WAIT, cmd_args[0], cmd_args)
                if res:
                    sys.exit(res)
            elif isinstance(j, Jobs.PipedJob):
                raise NotImplementedError,"Piped jobs aren't implemented yet."
            else:
                raise ValueError,'Encountered unknown job.'

    def claim(self, option):
        # FIXME: Move to OptionList once introduced and implement.
        pass

    def warning(self, message):
        print >>sys.stderr,'%s: %s' % (sys.argv[0], message)

    def printOptions(self, args, options):
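        """printOptions - Print the parsed argument list in the
        -ccc-print-options format, one line per option with its name
        and values."""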
        for i,oi in enumerate(options):
            if isinstance(oi, Arguments.InputArg):
                name = "<input>"
            elif isinstance(oi, Arguments.UnknownArg):
                name = "<unknown>"
            else:
                assert oi.opt
                name = oi.opt.name
            if isinstance(oi, Arguments.MultipleValuesArg):
                values = list(oi.getValues(args))
            elif isinstance(oi, Arguments.ValueArg):
                values = [oi.getValue(args)]
            elif isinstance(oi, Arguments.JoinedAndSeparateValuesArg):
                values = [oi.getJoinedValue(args), oi.getSeparateValue(args)]
            else:
                values = []
            print 'Option %d - Name: "%s", Values: {%s}' % (i, name,
                                                            ', '.join(['"%s"' % v
                                                                       for v in values]))

    def printPhases(self, args, phases):
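        """printPhases - Print the action graph in the
        -ccc-print-phases format, numbering each step and showing its
        inputs and output type."""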
        def printPhase(p, f, steps, arch=None):
            if p in steps:
                return steps[p]
            elif isinstance(p, Phases.BindArchAction):
                for kid in p.inputs:
                    printPhase(kid, f, steps, p.arch)
                steps[p] = len(steps)
                return

            if isinstance(p, Phases.InputAction):
                phaseName = 'input'
                inputStr = '"%s"' % p.filename.getValue(args)
            else:
                phaseName = p.phase.name
                inputs = [printPhase(i, f, steps, arch)
                          for i in p.inputs]
                inputStr = '{%s}' % ', '.join(map(str, inputs))
            if arch is not None:
                phaseName += '-' + arch.getValue(args)
            steps[p] = index = len(steps)
            print "%d: %s, %s, %s" % (index,phaseName,inputStr,p.type.name)
            return index
        steps = {}
        for phase in phases:
            printPhase(phase, sys.stdout, steps)

    def handleImmediateOptions(self, args, options):
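        """handleImmediateOptions - Handle options which the driver
        consumes directly (-dumpversion, -print-search-dirs, etc.)
        without building a pipeline."""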
        # FIXME: Some driver Arguments are consumed right off the bat,
        # like -dumpversion. Currently the gcc-dd handles these
        # poorly, so we should be ok handling them upfront instead of
        # after driver-driver level dispatching.
        #
        # FIXME: The actual order of these options in gcc is all over the
        # place. The -dump ones seem to be first and in specification
        # order, but there are other levels of precedence. For example,
        # -print-search-dirs is evaluated before -print-prog-name=,
        # regardless of order (and the last instance of -print-prog-name=
        # wins versus itself).
        #
        # FIXME: Do we want to report "argument unused" type errors in the
        # presence of things like -dumpmachine and -print-search-dirs?
        # Probably not.
        for oi in options:
            if oi.opt is not None:
                if oi.opt.name in ('-dumpmachine',
                                   '-dumpspecs',
                                   '-dumpversion',
                                   '-print-file-name=',
                                   '-print-multi-directory',
                                   '-print-multi-lib',
                                   '-print-prog-name=',
                                   '-print-libgcc-file-name',
                                   '-print-search-dirs'):
                    print 'FIXME: %s' % oi.opt.name
                    sys.exit(1)

    def buildNormalPipeline(self, args, inputArgs):
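        """buildNormalPipeline - Build the list of actions for a single
        (non driver-driver) compilation: classify the inputs, determine
        the final phase requested by the user (-E, -fsyntax-only, -S,
        -c, or link), and chain preprocess/compile/assemble/link
        actions for each input."""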
        hasCombine = None
        hasSyntaxOnly = None
        hasDashC = hasDashE = hasDashS = None

        inputType = None
        inputTypeOpt = None
        inputs = []
        for a in args:
            if isinstance(a, Arguments.InputArg):
                if inputType is None:
                    base,ext = os.path.splitext(a.getValue(inputArgs))
                    if ext and ext in Types.kTypeSuffixMap:
                        klass = Types.kTypeSuffixMap[ext]
                    else:
                        # FIXME: It's not clear why we shouldn't just
                        # revert to unknown. I think this is more likely a
                        # bug / unintended behavior in gcc. Not very
                        # important though.
                        klass = Types.ObjectType
                else:
                    assert inputTypeOpt is not None
                    self.claim(inputTypeOpt)
                    klass = inputType
                inputs.append((klass, a))
            elif a.opt is not None:
                # FIXME: We should warn about inconsistent and duplicate
                # usage of these flags.
                if a.opt.name == '-E':
                    hasDashE = a
                elif a.opt.name == '-S':
                    hasDashS = a
                elif a.opt.name == '-c':
                    hasDashC = a
                elif a.opt.name == '-fsyntax-only':
                    hasSyntaxOnly = a
                elif a.opt.name == '-combine':
                    hasCombine = a
                elif a.opt.name == '-filelist':
                    # FIXME: This might not be good enough. We may
                    # need to introduce another type of InputArg for
                    # this case, so that other code which needs to
                    # know the inputs handles this properly. Best not
                    # to try and lipo this, for example.
                    #
                    # Treat as a linker input.
                    inputs.append((Types.ObjectType, a))
                elif a.opt.name == '-x':
                    self.claim(a)
                    inputTypeOpt = a
                    value = a.getValue(inputArgs)
                    if value in Types.kTypeSpecifierMap:
                        inputType = Types.kTypeSpecifierMap[value]
                    else:
                        # FIXME: How are we going to handle diagnostics?
                        self.warning("language %s not recognized" % value)

                        # FIXME: It's not clear why we shouldn't just
                        # revert to unknown. I think this is more likely a
                        # bug / unintended behavior in gcc. Not very
                        # important though.
                        inputType = Types.ObjectType

        # We claim things here so that options for which we silently allow
        # override only ever claim the used option.
        if hasCombine:
            self.claim(hasCombine)

        finalPhase = Phases.Phase.eOrderPostAssemble
        finalPhaseOpt = None

        # Determine what compilation mode we are in.
        if hasDashE:
            finalPhase = Phases.Phase.eOrderPreprocess
            finalPhaseOpt = hasDashE
        elif hasSyntaxOnly:
            finalPhase = Phases.Phase.eOrderCompile
            finalPhaseOpt = hasSyntaxOnly
        elif hasDashS:
            finalPhase = Phases.Phase.eOrderCompile
            finalPhaseOpt = hasDashS
        elif hasDashC:
            finalPhase = Phases.Phase.eOrderAssemble
            finalPhaseOpt = hasDashC

        if finalPhaseOpt:
            self.claim(finalPhaseOpt)

        # FIXME: Support -combine.
        if hasCombine:
            raise NotImplementedError,"-combine is not yet supported."

        actions = []
        linkerInputs = []
        # FIXME: This is gross.
        linkPhase = Phases.LinkPhase()
        for klass,input in inputs:
            # Figure out what step to start at.

            # FIXME: This should be part of the input class probably?
            # Although it doesn't quite fit there either; things like
            # asm-with-preprocess don't easily fit into a linear scheme.

            # FIXME: I think we are going to end up wanting to just build
            # a simple FSA which we run the inputs down.
            sequence = []
            if klass.preprocess:
                sequence.append(Phases.PreprocessPhase())
            if klass == Types.ObjectType:
                sequence.append(linkPhase)
            elif klass.onlyAssemble:
                sequence.extend([Phases.AssemblePhase(),
                                 linkPhase])
            elif klass.onlyPrecompile:
                sequence.append(Phases.PrecompilePhase())
            else:
                sequence.extend([Phases.CompilePhase(),
                                 Phases.AssemblePhase(),
                                 linkPhase])

            if sequence[0].order > finalPhase:
                assert finalPhaseOpt and finalPhaseOpt.opt
                # FIXME: Explain what type of input file this is. Or just
                # match the gcc warning.
                self.warning("%s: %s input file unused when %s is present" % (input.getValue(inputArgs),
                                                                              sequence[0].name,
                                                                              finalPhaseOpt.opt.name))
            else:
                # Build the pipeline for this file.

                current = Phases.InputAction(input, klass)
                for transition in sequence:
                    # If the current action produces no output, or we are
                    # past what the user requested, we are done.
                    if (current.type is Types.NothingType or
                        transition.order > finalPhase):
                        break
                    else:
                        if isinstance(transition, Phases.PreprocessPhase):
                            assert isinstance(klass.preprocess, Types.InputType)
                            current = Phases.JobAction(transition,
                                                       [current],
                                                       klass.preprocess)
                        elif isinstance(transition, Phases.PrecompilePhase):
                            current = Phases.JobAction(transition,
                                                       [current],
                                                       Types.PCHType)
                        elif isinstance(transition, Phases.CompilePhase):
                            if hasSyntaxOnly:
                                output = Types.NothingType
                            else:
                                output = Types.AsmTypeNoPP
                            current = Phases.JobAction(transition,
                                                       [current],
                                                       output)
                        elif isinstance(transition, Phases.AssemblePhase):
                            current = Phases.JobAction(transition,
                                                       [current],
                                                       Types.ObjectType)
                        elif transition is linkPhase:
                            linkerInputs.append(current)
                            current = None
                            break
                        else:
                            raise RuntimeError,'Unrecognized transition: %s.' % transition

                if current is not None:
                    assert not isinstance(current, Phases.InputAction)
                    actions.append(current)

        if linkerInputs:
            actions.append(Phases.JobAction(linkPhase,
                                            linkerInputs,
                                            Types.ImageType))

        return actions

    def buildPipeline(self, args, inputArgs):
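        """buildPipeline - Build the driver-driver pipeline: build the
        normal pipeline, then wrap each action in a BindArchAction per
        -arch flag and add a lipo step when multiple architectures
        share one output."""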
        # FIXME: We need to handle canonicalization of the specified arch.

        archs = []
        hasOutput = None
        hasDashM = hasSaveTemps = None
        for o in args:
            if o.opt is None:
                continue

            if isinstance(o, Arguments.ValueArg):
                if o.opt.name == '-arch':
                    archs.append(o)
            elif o.opt.name.startswith('-M'):
                hasDashM = o
            elif o.opt.name in ('-save-temps','--save-temps'):
                hasSaveTemps = o

        if not archs:
            # FIXME: Need to infer arch so that we sub -Xarch
            # correctly.
            archs.append(Arguments.DerivedArg('i386'))

        actions = self.buildNormalPipeline(args, inputArgs)

        # FIXME: Use a custom exception for this.
        #
        # FIXME: We killed off some others but these aren't yet detected in
        # a functional manner. If we added information to jobs about which
        # "auxiliary" files they wrote then we could detect the conflict
        # these cause downstream.
        if len(archs) > 1:
            if hasDashM:
                raise ValueError,"Cannot use -M options with multiple arch flags."
            elif hasSaveTemps:
                raise ValueError,"Cannot use -save-temps with multiple arch flags."

        # Execute once per arch.
        finalActions = []
        for p in actions:
            # Make sure we can lipo this kind of output. If not (and it
            # is an actual output) then we disallow, since we can't
            # create an output file with the right name without
            # overwriting it. We could remove this oddity by just
            # changing the output names to include the arch, which would
            # also fix -save-temps. Compatibility wins for now.
            #
            # FIXME: Is this error substantially less useful than
            # gcc-dd's? The main problem is that "Cannot use compiler
            # output with multiple arch flags" won't make sense to most
            # developers.
            if (len(archs) > 1 and
                p.type not in (Types.NothingType,Types.ObjectType,Types.ImageType)):
                raise ValueError,'Cannot use %s output with multiple arch flags.' % p.type.name

            inputs = []
            for arch in archs:
                inputs.append(Phases.BindArchAction(p, arch))

            # Lipo if necessary. We do it this way because we need to set
            # the arch flag so that -Xarch_ gets rewritten.
            if len(inputs) == 1 or p.type == Types.NothingType:
                finalActions.extend(inputs)
            else:
                finalActions.append(Phases.JobAction(Phases.LipoPhase(),
                                                     inputs,
                                                     p.type))

        # FIXME: We need to add -Wl,arch_multiple and -Wl,final_output in
        # certain cases. This may be icky because we need to figure out the
        # mode first. The current plan is to hack on the pipeline once it is
        # built and we know what is being spit out. This avoids having to
        # handle things like -c and -combine in multiple places.
        #
        # The annoying one of these is -Wl,final_output because it involves
        # communication across different phases.
        #
        # Hopefully we can do this purely as part of the binding, but
        # leaving a comment here for now until it is clear this works.

        return finalActions

    def bindPhases(self, phases, args, inputArgs):
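        """bindPhases - Bind the action graph to concrete tool jobs,
        selecting tools, output locations (the user's -o, derived
        names, or temporary files), and pipes where supported; returns
        the resulting JobList."""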
        jobs = Jobs.JobList()

        finalOutput = None
        hasSaveTemps = hasNoIntegratedCPP = hasPipe = None
        forward = []
        for a in args:
            if isinstance(a, Arguments.InputArg):
                pass
            elif a.opt is not None:
                if a.opt.name == '-save-temps':
                    hasSaveTemps = a
                elif a.opt.name == '-no-integrated-cpp':
                    hasNoIntegratedCPP = a
                elif a.opt.name == '-o':
                    finalOutput = a
                elif a.opt.name == '-pipe':
                    hasPipe = a
                elif a.opt.name in ('-E', '-S', '-c',
                                    '-arch', '-fsyntax-only', '-combine', '-x',
                                    '-###'):
                    pass
                else:
                    forward.append(a)
            else:
                forward.append(a)

        # We claim things here so that options for which we silently allow
        # override only ever claim the used option.
        if hasPipe:
            self.claim(hasPipe)
            # FIXME: Hack, override -pipe till we support it.
            hasPipe = None
        # Claim these here. It's not completely accurate, but any warnings
        # about these being unused are likely to be noise anyway.
        if hasSaveTemps:
            self.claim(hasSaveTemps)
        if hasNoIntegratedCPP:
            self.claim(hasNoIntegratedCPP)

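        # Map each phase kind to the tool which constructs its jobs.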
        toolMap = {
            Phases.PreprocessPhase : Tools.GCC_PreprocessTool(),
            Phases.CompilePhase : Tools.GCC_CompileTool(),
            Phases.PrecompilePhase : Tools.GCC_PrecompileTool(),
            Phases.AssemblePhase : Tools.DarwinAssemblerTool(),
            Phases.LinkPhase : Tools.Collect2Tool(),
            Phases.LipoPhase : Tools.LipoTool(),
            }

        class InputInfo:
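            """InputInfo - Intermediate result of binding a phase: the
            source (an argument or piped job), its type, and the
            original input it was derived from."""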
            def __init__(self, source, type, baseInput):
                self.source = source
                self.type = type
                self.baseInput = baseInput

            def __repr__(self):
                return '%s(%r, %r, %r)' % (self.__class__.__name__,
                                           self.source, self.type, self.baseInput)

        def createJobs(phase, forwardArgs,
                       canAcceptPipe=False, atTopLevel=False, arch=None):
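            """createJobs - Recursively bind the given phase and its
            inputs to jobs, returning an InputInfo which describes
            where the phase's output can be found."""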
            if isinstance(phase, Phases.InputAction):
                return InputInfo(phase.filename, phase.type, phase.filename)
            elif isinstance(phase, Phases.BindArchAction):
                archName = phase.arch.getValue(inputArgs)
                filteredArgs = []
                for oi in forwardArgs:
                    if oi.opt is None:
                        filteredArgs.append(oi)
                    elif oi.opt.name == '-arch':
                        if oi is phase.arch:
                            filteredArgs.append(oi)
                    elif oi.opt.name == '-Xarch_':
                        # FIXME: gcc-dd has another conditional for passing
                        # through, when the arch conditional array has an empty
                        # string. Why?
                        if oi.getJoinedValue(inputArgs) == archName:
                            # FIXME: This is wrong, we don't want a
                            # DerivedArg; we want an actual parsed version
                            # of this arg.
                            filteredArgs.append(Arguments.DerivedArg(oi.getSeparateValue(inputArgs)))
                    else:
                        filteredArgs.append(oi)

                return createJobs(phase.inputs[0], filteredArgs,
                                  canAcceptPipe, atTopLevel, phase.arch)

            assert isinstance(phase, Phases.JobAction)
            tool = toolMap[phase.phase.__class__]

            # See if we should use an integrated CPP. We only use an
            # integrated cpp when we have exactly one input, since this is
            # the only use case we care about.
            useIntegratedCPP = False
            inputList = phase.inputs
            if (not hasNoIntegratedCPP and
                not hasSaveTemps and
                tool.hasIntegratedCPP()):
                if (len(phase.inputs) == 1 and
                    isinstance(phase.inputs[0].phase, Phases.PreprocessPhase)):
                    useIntegratedCPP = True
                    inputList = phase.inputs[0].inputs

            # Only try to use pipes when there is exactly one input.
            canAcceptPipe = len(inputList) == 1 and tool.acceptsPipedInput()
            inputs = [createJobs(p, forwardArgs, canAcceptPipe, False, arch) for p in inputList]

            # Determine if we should output to a pipe.
            canOutputToPipe = canAcceptPipe and tool.canPipeOutput()
            outputToPipe = False
            if canOutputToPipe:
                # Some things default to writing to a pipe if this is the
                # final phase and there was no user override.
                #
                # FIXME: What is the best way to handle this?
                if (atTopLevel and
                    isinstance(phase.phase, Phases.PreprocessPhase) and
                    not finalOutput):
                    outputToPipe = True
                elif hasPipe:
                    outputToPipe = True

            # Figure out where to put the job (pipes).
            jobList = jobs
            if canAcceptPipe and isinstance(inputs[0].source, Jobs.PipedJob):
                jobList = inputs[0].source

            # Figure out where to put the output.
            baseInput = inputs[0].baseInput
            if phase.type == Types.NothingType:
                output = None
            elif outputToPipe:
                if isinstance(jobList, Jobs.PipedJob):
                    output = jobList
                else:
                    jobList = output = Jobs.PipedJob([])
                    jobs.addJob(output)
            else:
                # Figure out what the derived output location would be.
                #
                # FIXME: gcc has some special case in here so that it doesn't
                # create output files if they would conflict with an input.
                inputName = baseInput.getValue(inputArgs)
                if phase.type is Types.ImageType:
                    namedOutput = "a.out"
                else:
                    base,_ = os.path.splitext(inputName)
                    assert phase.type.tempSuffix is not None
                    namedOutput = base + phase.type.tempSuffix

                # Output to the user requested destination?
                if atTopLevel and finalOutput:
                    output = finalOutput
                # Construct a named destination?
                elif atTopLevel or hasSaveTemps:
                    output = Arguments.DerivedArg(namedOutput)
                else:
                    # Output to a temporary file.
                    fd,filename = tempfile.mkstemp(suffix=phase.type.tempSuffix)
                    output = Arguments.DerivedArg(filename)

            tool.constructJob(phase, arch, jobList, inputs, output, phase.type, forwardArgs)

            return InputInfo(output, phase.type, baseInput)

        # It is an error to provide a -o option if we are making multiple
        # output files.
        if finalOutput and len([a for a in phases if a.type is not Types.NothingType]) > 1:
            # FIXME: Use a custom exception.
            raise ValueError,"Cannot specify -o when generating multiple files."

        for phase in phases:
            createJobs(phase, forward, canAcceptPipe=True, atTopLevel=True)

        return jobs
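
# Minimal usage sketch (not part of the original module): the driver is
# normally invoked from a separate entry-point script, so the invocation
# below is only an illustrative assumption.
if __name__ == '__main__':
    Driver().run(sys.argv[1:])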