Second attempt:

Massive check-in. This changes the "-fast" flag to "-O#" in llc; to get the
old behavior, pass -O0. The change allows finer-grained control over which
optimizations are run at each -O level.
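
For example (illustrative invocations; foo.bc is a placeholder input file):

    # Before this patch: skip codegen optimizations for compile speed.
    llc -fast foo.bc -o foo.s

    # After this patch: the equivalent spelling is -O0; higher levels
    # (-O1, -O2, -O3) enable progressively more optimization.
    llc -O0 foo.bc -o foo.s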

Most of this work was mechanical; the majority of the fixes consisted of
verifying that a "fast" variable was no longer used. The JIT still uses a
"Fast" flag; I'll change that in a follow-up patch.


git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@70343 91177308-0d34-0410-b5e6-96231b3b80d8
diff --git a/lib/CodeGen/SelectionDAG/FastISel.cpp b/lib/CodeGen/SelectionDAG/FastISel.cpp
index 12b0b12..a7801eb 100644
--- a/lib/CodeGen/SelectionDAG/FastISel.cpp
+++ b/lib/CodeGen/SelectionDAG/FastISel.cpp
@@ -327,7 +327,7 @@
   default: break;
   case Intrinsic::dbg_stoppoint: {
     DbgStopPointInst *SPI = cast<DbgStopPointInst>(I);
-    if (DW && DW->ValidDebugInfo(SPI->getContext(), true)) {
+    if (DW && DW->ValidDebugInfo(SPI->getContext(), 0)) {
       DICompileUnit CU(cast<GlobalVariable>(SPI->getContext()));
       std::string Dir, FN;
       unsigned SrcFile = DW->getOrCreateSourceID(CU.getDirectory(Dir),
@@ -344,7 +344,7 @@
   }
   case Intrinsic::dbg_region_start: {
     DbgRegionStartInst *RSI = cast<DbgRegionStartInst>(I);
-    if (DW && DW->ValidDebugInfo(RSI->getContext(), true)) {
+    if (DW && DW->ValidDebugInfo(RSI->getContext(), 0)) {
       unsigned ID = 
         DW->RecordRegionStart(cast<GlobalVariable>(RSI->getContext()));
       const TargetInstrDesc &II = TII.get(TargetInstrInfo::DBG_LABEL);
@@ -354,7 +354,7 @@
   }
   case Intrinsic::dbg_region_end: {
     DbgRegionEndInst *REI = cast<DbgRegionEndInst>(I);
-    if (DW && DW->ValidDebugInfo(REI->getContext(), true)) {
+    if (DW && DW->ValidDebugInfo(REI->getContext(), 0)) {
      unsigned ID = 0;
      DISubprogram Subprogram(cast<GlobalVariable>(REI->getContext()));
      if (!Subprogram.isNull() && !Subprogram.describes(MF.getFunction())) {
@@ -380,7 +380,7 @@
     DbgFuncStartInst *FSI = cast<DbgFuncStartInst>(I);
     Value *SP = FSI->getSubprogram();
 
-    if (DW->ValidDebugInfo(SP, true)) {
+    if (DW->ValidDebugInfo(SP, 0)) {
       // llvm.dbg.func.start implicitly defines a dbg_stoppoint which is what
       // (most?) gdb expects.
       DebugLoc PrevLoc = DL;
@@ -425,7 +425,7 @@
   case Intrinsic::dbg_declare: {
     DbgDeclareInst *DI = cast<DbgDeclareInst>(I);
     Value *Variable = DI->getVariable();
-    if (DW && DW->ValidDebugInfo(Variable, true)) {
+    if (DW && DW->ValidDebugInfo(Variable, 0)) {
       // Determine the address of the declared object.
       Value *Address = DI->getAddress();
       if (BitCastInst *BCI = dyn_cast<BitCastInst>(Address))