Reapply "Make NumMicroOps a variable in the subtarget's instruction itinerary."

Reapplies r159406 with minor cleanup. The regressions appear to have been spurious.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@159541 91177308-0d34-0410-b5e6-96231b3b80d8
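
For reference, the convention this patch adopts: InstrItinerary::NumMicroOps is now a signed count, and a negative value marks a scheduling class whose micro-op count is variable and must be computed dynamically by a target hook such as ARMBaseInstrInfo::getNumMicroOps(). Below is a minimal self-contained sketch of that accessor shape; it is a hypothetical mirror of the relevant MCInstrItineraries.h pieces, not the exact code at this revision.

    // Hypothetical mirror of the MCInstrItineraries.h pieces this patch
    // relies on; the real definitions live under include/llvm/MC/.
    struct InstrItinerary {
      int NumMicroOps; // < 0 => variable, resolved by a target hook.
    };

    struct InstrItineraryData {
      const InstrItinerary *Itineraries;
      InstrItineraryData() : Itineraries(0) {}
      bool isEmpty() const { return Itineraries == 0; }
      // A negative result tells callers (e.g. the ARM hunks below) to fall
      // back to the target's dynamic micro-op computation. Returning -1 for
      // the empty case is an assumption; callers here guard with isEmpty().
      int getNumMicroOps(unsigned Class) const {
        return isEmpty() ? -1 : Itineraries[Class].NumMicroOps;
      }
    };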
diff --git a/lib/Target/ARM/ARMBaseInstrInfo.cpp b/lib/Target/ARM/ARMBaseInstrInfo.cpp
index 464c9aa..7227e62 100644
--- a/lib/Target/ARM/ARMBaseInstrInfo.cpp
+++ b/lib/Target/ARM/ARMBaseInstrInfo.cpp
@@ -2176,9 +2176,9 @@
 
   const MCInstrDesc &Desc = MI->getDesc();
   unsigned Class = Desc.getSchedClass();
-  unsigned UOps = ItinData->Itineraries[Class].NumMicroOps;
-  if (UOps)
-    return UOps;
+  int ItinUOps = ItinData->getNumMicroOps(Class);
+  if (ItinUOps >= 0)
+    return ItinUOps;
 
   unsigned Opc = MI->getOpcode();
   switch (Opc) {
@@ -2252,19 +2252,19 @@
         return 2;
       // 4 registers would be issued: 2, 2.
       // 5 registers would be issued: 2, 2, 1.
-      UOps = (NumRegs / 2);
+      int A8UOps = (NumRegs / 2);
       if (NumRegs % 2)
-        ++UOps;
-      return UOps;
+        ++A8UOps;
+      return A8UOps;
     } else if (Subtarget.isCortexA9()) {
-      UOps = (NumRegs / 2);
+      int A9UOps = (NumRegs / 2);
       // If there is an odd number of registers or if it's not 64-bit aligned,
       // then it takes an extra AGU (Address Generation Unit) cycle.
       if ((NumRegs % 2) ||
           !MI->hasOneMemOperand() ||
           (*MI->memoperands_begin())->getAlignment() < 8)
-        ++UOps;
-      return UOps;
+        ++A9UOps;
+      return A9UOps;
     } else {
       // Assume the worst.
       return NumRegs;
@@ -3057,7 +3057,7 @@
   unsigned Class = MCID.getSchedClass();
 
   // For instructions with variable uops, use uops as latency.
-  if (!ItinData->isEmpty() && !ItinData->Itineraries[Class].NumMicroOps) {
+  if (!ItinData->isEmpty() && ItinData->getNumMicroOps(Class) < 0) {
     return getNumMicroOps(ItinData, MI);
   }
   // For the common case, fall back on the itinerary's latency.
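
To make the per-CPU arithmetic in the first hunk concrete: on Cortex-A8 a load/store multiple issues its registers in pairs, so the micro-op count is NumRegs/2 rounded up; on Cortex-A9 an odd register count or a non-64-bit-aligned access adds one micro-op for the extra AGU cycle. A standalone restatement of that arithmetic follows (hypothetical helper names, not part of the patch):

    // Standalone restatement of the Cortex-A8/A9 formulas from the first
    // hunk above. Helper names are illustrative, not LLVM API.
    #include <cstdio>

    static int a8LoadStoreMultipleUOps(unsigned NumRegs) {
      // Registers issue two at a time: 4 regs -> 2,2; 5 regs -> 2,2,1.
      return NumRegs / 2 + (NumRegs % 2 ? 1 : 0);
    }

    static int a9LoadStoreMultipleUOps(unsigned NumRegs, bool Aligned64) {
      int UOps = NumRegs / 2;
      // An odd register count, or an access that is not 64-bit aligned,
      // costs an extra Address Generation Unit (AGU) cycle.
      if ((NumRegs % 2) || !Aligned64)
        ++UOps;
      return UOps;
    }

    int main() {
      std::printf("A8, 5 regs: %d uops\n", a8LoadStoreMultipleUOps(5));   // 3
      std::printf("A9, 4 regs, unaligned: %d uops\n",
                  a9LoadStoreMultipleUOps(4, /*Aligned64=*/false));       // 3
      return 0;
    }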