Basic block combine pass

Combine basic blocks terminated by an instruction that we have since
proven cannot throw.  This change is intended to relieve some of the
computational load on LLVM by reducing the number of basic blocks
it has to contend with.
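
To make the intent concrete, here is a minimal sketch of the merge step.
Only the kDead marker comes from this change; the simplified MIR and
BasicBlock types and the CombineWithFallThrough name are illustrative
stand-ins, not the actual pass. A block whose terminator is proven
non-throwing, whose only successor is its fallthrough, and whose
successor has no other predecessors can absorb that successor, which is
then marked kDead:

  #include <cstddef>

  enum BBType { kNormal, kDead };

  struct MIR {
    bool canThrow;   // Conservatively true until analysis proves otherwise.
    MIR* next;
  };

  struct BasicBlock {
    BBType blockType;
    MIR* firstMIR;
    MIR* lastMIR;
    BasicBlock* fallThrough;  // Sole successor when taken == NULL.
    BasicBlock* taken;
    int predecessorCount;
  };

  // Absorb bb's fallthrough successor when the terminator cannot throw.
  static bool CombineWithFallThrough(BasicBlock* bb) {
    BasicBlock* next = bb->fallThrough;
    if (bb->blockType == kDead || next == NULL || bb->taken != NULL ||
        next->predecessorCount != 1 || bb->lastMIR == NULL ||
        bb->lastMIR->canThrow) {
      return false;  // CFG shape forbids the merge, or the throw is still live.
    }
    bb->lastMIR->next = next->firstMIR;   // Splice instruction lists.
    if (next->lastMIR != NULL) {
      bb->lastMIR = next->lastMIR;
    }
    bb->fallThrough = next->fallThrough;  // Inherit the successor's edges.
    bb->taken = next->taken;
    next->blockType = kDead;              // Retire the absorbed block.
    return true;
  }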

Also:
  Add stats showing how effective null- and range-check elimination is
  (a reporting sketch follows this list).
  Restore the mechanism that disables some expensive optimization passes
  when compiling large methods.
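
A hedged sketch of how the new Checkstats counters might be reported:
the struct fields come from the diff below, but DumpCheckStats and its
output format are hypothetical, not part of this change.

  #include <cstdio>

  struct Checkstats {
    int nullChecks;             // Null checks seen.
    int nullChecksEliminated;   // Proven redundant and removed.
    int rangeChecks;            // Array bounds checks seen.
    int rangeChecksEliminated;  // Proven redundant and removed.
  };

  // Illustrative reporting helper (name and format are hypothetical).
  static void DumpCheckStats(const Checkstats& s) {
    if (s.nullChecks > 0) {  // Avoid divide-by-zero on check-free methods.
      printf("null checks: %d/%d eliminated (%.1f%%)\n",
             s.nullChecksEliminated, s.nullChecks,
             100.0 * s.nullChecksEliminated / s.nullChecks);
    }
    if (s.rangeChecks > 0) {
      printf("range checks: %d/%d eliminated (%.1f%%)\n",
             s.rangeChecksEliminated, s.rangeChecks,
             100.0 * s.rangeChecksEliminated / s.rangeChecks);
    }
  }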

Change-Id: I7fae22160988cbefb90ea9fb1cc26d7364e8d229
diff --git a/src/compiler/CompilerIR.h b/src/compiler/CompilerIR.h
index 02c7621..1cbf2b5 100644
--- a/src/compiler/CompilerIR.h
+++ b/src/compiler/CompilerIR.h
@@ -160,6 +160,7 @@
   kExitBlock,
   kExceptionHandling,
   kCatchEntry,
+  kDead,
 };
 
 /* Utility macros to traverse the LIR list */
@@ -239,6 +240,13 @@
   LIR* misPredBranchOver;
 };
 
+struct Checkstats {
+  int nullChecks;
+  int nullChecksEliminated;
+  int rangeChecks;
+  int rangeChecksEliminated;
+};
+
 struct MIR {
   DecodedInstruction dalvikInsn;
   unsigned int width;
@@ -272,7 +280,6 @@
   bool visited;
   bool hidden;
   bool catchEntry;
-  bool fallThroughTarget;             // Reached via fallthrough
 #if defined(ART_USE_QUICK_COMPILER)
   bool hasReturn;
 #endif
@@ -412,6 +419,7 @@
       currentArena(NULL),
       numArenaBlocks(0),
       mstats(NULL),
+      checkstats(NULL),
 #if defined(ART_USE_QUICK_COMPILER)
       genBitcode(false),
       context(NULL),
@@ -571,6 +579,7 @@
   u4 insnsSize;
   bool disableDataflow; // Skip dataflow analysis if possible
   SafeMap<unsigned int, BasicBlock*> blockMap; // findBlock lookup cache
+  SafeMap<unsigned int, unsigned int> blockIdMap; // Block collapse lookup cache
   SafeMap<unsigned int, LIR*> boundaryMap; // boundary lookup cache
   int defCount;         // Used to estimate number of SSA names
 
@@ -582,6 +591,7 @@
   ArenaMemBlock* currentArena;
   int numArenaBlocks;
   Memstats* mstats;
+  Checkstats* checkstats;
 #if defined(ART_USE_QUICK_COMPILER)
   bool genBitcode;
   llvm::LLVMContext* context;