Coalesce stack slot accesses that arise when spilling both sides of a COPY.
This helps avoid silly code:

    %R0<def> = LOAD <fi#5>
    STORE <fi#5>, %R0<kill>
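
For illustration only (this sketch is not part of the patch): a minimal
standalone C++ model of the no-op pattern being removed, where Inst and
coalesceNoOpPairs are hypothetical names. The real spiller instead asks
TargetInstrInfo about each instruction it visits, as the diff below shows.

    #include <cstddef>
    #include <vector>

    struct Inst {
      enum Kind { Load, Store, Other } kind;
      unsigned reg; // register defined (Load) or stored (Store)
      int slot;     // frame index, e.g. fi#5
    };

    // Drop every adjacent "reg = LOAD slot; STORE slot, reg" pair:
    // reloading a value and spilling it straight back changes nothing.
    static std::vector<Inst> coalesceNoOpPairs(const std::vector<Inst> &In) {
      std::vector<Inst> Out;
      for (std::size_t i = 0; i < In.size(); ++i) {
        if (i + 1 < In.size() && In[i].kind == Inst::Load &&
            In[i + 1].kind == Inst::Store && In[i].reg == In[i + 1].reg &&
            In[i].slot == In[i + 1].slot) {
          ++i; // skip both halves of the pair
          continue;
        }
        Out.push_back(In[i]);
      }
      return Out;
    }

    int main() {
      // The pair shown above: %R0<def> = LOAD <fi#5>; STORE <fi#5>, %R0<kill>
      std::vector<Inst> Code = {{Inst::Load, 0, 5}, {Inst::Store, 0, 5}};
      return (int)coalesceNoOpPairs(Code).size(); // 0: both accesses deleted
    }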

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@110266 91177308-0d34-0410-b5e6-96231b3b80d8
diff --git a/lib/CodeGen/InlineSpiller.cpp b/lib/CodeGen/InlineSpiller.cpp
index d9dcbc0..872a829 100644
--- a/lib/CodeGen/InlineSpiller.cpp
+++ b/lib/CodeGen/InlineSpiller.cpp
@@ -85,6 +85,7 @@
   bool reMaterializeFor(MachineBasicBlock::iterator MI);
   void reMaterializeAll();
 
+  bool coalesceStackAccess(MachineInstr *MI);
   bool foldMemoryOperand(MachineBasicBlock::iterator MI,
                          const SmallVectorImpl<unsigned> &Ops);
   void insertReload(LiveInterval &NewLI, MachineBasicBlock::iterator MI);
@@ -291,6 +292,26 @@
   }
 }
 
+/// coalesceStackAccess - If MI is a load or store of stackSlot_, it can be
+/// removed.
+bool InlineSpiller::coalesceStackAccess(MachineInstr *MI) {
+  int FI = 0;
+  unsigned reg;
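+  // The hooks return the accessed register, or 0 if MI is not a stack access.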
+  if (!(reg = tii_.isLoadFromStackSlot(MI, FI)) &&
+      !(reg = tii_.isStoreToStackSlot(MI, FI)))
+    return false;
+
+  // We have a stack access. Is it the right register and slot?
+  if (reg != li_->reg || FI != stackSlot_)
+    return false;
+
+  DEBUG(dbgs() << "Coalescing stack access: " << *MI);
+  lis_.RemoveMachineInstrFromMaps(MI);
+  MI->eraseFromParent();
+  return true;
+}
+
 /// foldMemoryOperand - Try folding stack slot references in Ops into MI.
 /// Return true on success, and MI will be erased.
 bool InlineSpiller::foldMemoryOperand(MachineBasicBlock::iterator MI,
@@ -399,6 +420,10 @@
       continue;
     }
 
+    // Stack slot accesses may coalesce away.
+    if (coalesceStackAccess(MI))
+      continue;
+
     // Analyze instruction.
     bool Reads, Writes;
     SmallVector<unsigned, 8> Ops;