Reapply r101434 with a fix for self-hosting.
Rotate CallInst operands, i.e. move the callee to the back of the operand array.
The motivation for this patch is laid out in my mail to llvm-commits:
more efficient access to operands and callee, faster call graph construction,
and a smaller compiler binary.
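
An illustrative sketch (not part of this patch) of what the rotated layout
means for code that walks the raw operand list; it uses only accessors that
already exist in tree (User::getNumOperands/getOperand, CallInst::getCalledValue),
and the helper name is made up for the example:

  #include "llvm/Instructions.h"
  #include <cassert>
  using namespace llvm;

  // Count how many call arguments are themselves instructions, reading the
  // raw operand list in the rotated order: arguments occupy slots
  // [0, N-1) and the callee is the trailing operand.
  static unsigned countInstructionArgs(const CallInst *CI) {
    unsigned N = CI->getNumOperands();
    unsigned Count = 0;
    for (unsigned i = 0; i + 1 != N; ++i)      // skip the callee at N-1
      if (isa<Instruction>(CI->getOperand(i)))
        ++Count;
    assert(CI->getOperand(N - 1) == CI->getCalledValue() &&
           "callee is the last operand after the rotation");
    return Count;
  }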
git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@101465 91177308-0d34-0410-b5e6-96231b3b80d8
diff --git a/lib/Transforms/Utils/AddrModeMatcher.cpp b/lib/Transforms/Utils/AddrModeMatcher.cpp
index ea9d1c1..474b5fd 100644
--- a/lib/Transforms/Utils/AddrModeMatcher.cpp
+++ b/lib/Transforms/Utils/AddrModeMatcher.cpp
@@ -382,7 +382,7 @@
std::vector<InlineAsm::ConstraintInfo>
Constraints = IA->ParseConstraints();
- unsigned ArgNo = 1; // ArgNo - The operand of the CallInst.
+ unsigned ArgNo = 0; // ArgNo - The operand of the CallInst.
for (unsigned i = 0, e = Constraints.size(); i != e; ++i) {
TargetLowering::AsmOperandInfo OpInfo(Constraints[i]);
@@ -450,7 +450,7 @@
if (CallInst *CI = dyn_cast<CallInst>(U)) {
InlineAsm *IA = dyn_cast<InlineAsm>(CI->getCalledValue());
- if (IA == 0) return true;
+ if (!IA) return true;
// If this is a memory operand, we're cool, otherwise bail out.
if (!IsOperandAMemoryOperand(CI, IA, I, TLI))