Fix the IntegersSubsetTest unit test when compiled with gcc-4.7. The issue here
is that the unit test doesn't have IntTy equal to APInt; instead, it uses a class
derived from APInt. When, as in these lines, an IntTy& reference is returned
but is bound to an APInt&, the compiler destroys the temporary the IntTy& was
referring to, leaving the APInt& referring to garbage. This causes the unit test
to fail systematically on my machine; the problem can also be caught by running
the test under valgrind.
git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@167356 91177308-0d34-0410-b5e6-96231b3b80d8
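
For illustration only (not part of the patch): a minimal sketch of the
dangling-reference pattern described above. The types and functions here
(Base, Derived, Range, getItem) are simplified stand-ins, not the real
APInt/IntTy/IntegersSubset API.

// Simplified stand-ins: Derived plays the role of IntTy, Base of APInt,
// Range of the item type whose getLow() returns a reference into itself.
#include <cassert>

struct Base { int Value = 0; };
struct Derived : Base {};

struct Range {
  Derived Low;
  const Derived &getLow() const { return Low; } // reference into *this
};

Range getItem() { return Range(); } // returned by value -> a temporary

int main() {
  // Dangling: getItem() is a temporary, and binding a reference to a member
  // reached through getLow() does not extend the temporary's lifetime, so
  // BadLow would be left referring to a destroyed object (what valgrind flags).
  //   const Base &BadLow = getItem().getLow();

  // Fixed: copy the value while the temporary is still alive, which is what
  // the patch below does by dropping the '&' from Low and High.
  const Base GoodLow = getItem().getLow();
  assert(GoodLow.Value == 0);
  return 0;
}
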
diff --git a/include/llvm/Support/IntegersSubset.h b/include/llvm/Support/IntegersSubset.h
index bb9e769..03039fd 100644
--- a/include/llvm/Support/IntegersSubset.h
+++ b/include/llvm/Support/IntegersSubset.h
@@ -411,8 +411,8 @@
unsigned getSize() const {
APInt sz(((const APInt&)getItem(0).getLow()).getBitWidth(), 0);
for (unsigned i = 0, e = getNumItems(); i != e; ++i) {
- const APInt &Low = getItem(i).getLow();
- const APInt &High = getItem(i).getHigh();
+ const APInt Low = getItem(i).getLow();
+ const APInt High = getItem(i).getHigh();
APInt S = High - Low + 1;
sz += S;
}
@@ -426,8 +426,8 @@
APInt getSingleValue(unsigned idx) const {
APInt sz(((const APInt&)getItem(0).getLow()).getBitWidth(), 0);
for (unsigned i = 0, e = getNumItems(); i != e; ++i) {
- const APInt &Low = getItem(i).getLow();
- const APInt &High = getItem(i).getHigh();
+ const APInt Low = getItem(i).getLow();
+ const APInt High = getItem(i).getHigh();
APInt S = High - Low + 1;
APInt oldSz = sz;
sz += S;