Fix the last users of TARGET_CPU_SMP.

Everything else in the tree already assumes SMP. Replace the remaining
compile-time ANDROID_SMP guards in the quick compiler's GenMemBarrier with a
runtime check of the target's instruction set features, and drop the guards
from the assemblers and the LLVM IR builder so they always emit barriers.
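
The sketch below is illustrative only (a stand-in InstructionSetFeatures and a
free-standing GenMemBarrier, not the real ART classes); it shows the shape of
the change, with the SMP decision made per target at runtime instead of at
preprocessing time:

    #include <iostream>

    // Stand-in for art::InstructionSetFeatures; only the SMP bit matters here.
    struct InstructionSetFeatures {
      bool smp;
      bool IsSmp() const { return smp; }
    };

    // Before: "#if ANDROID_SMP != 0" compiled the barrier out entirely.
    // After: the same decision is taken at runtime from the target features.
    bool GenMemBarrier(const InstructionSetFeatures& features) {
      if (!features.IsSmp()) {
        return false;  // Single-core target: no barrier emitted.
      }
      // The real code paths emit or reuse a dmb/mfence LIR here.
      return true;
    }

    int main() {
      std::cout << GenMemBarrier({/*smp=*/true}) << "\n";   // prints 1
      std::cout << GenMemBarrier({/*smp=*/false}) << "\n";  // prints 0
    }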

Change-Id: I7ff7faef46fbec6c67d6e446812d599e473cba39
diff --git a/compiler/dex/quick/arm/int_arm.cc b/compiler/dex/quick/arm/int_arm.cc
index 57544b5..cab039b 100644
--- a/compiler/dex/quick/arm/int_arm.cc
+++ b/compiler/dex/quick/arm/int_arm.cc
@@ -16,6 +16,7 @@
 
 /* This file contains codegen for the Thumb2 ISA. */
 
+#include "arch/instruction_set_features.h"
 #include "arm_lir.h"
 #include "codegen_arm.h"
 #include "dex/quick/mir_to_lir-inl.h"
@@ -1119,7 +1120,9 @@
 }
 
 bool ArmMir2Lir::GenMemBarrier(MemBarrierKind barrier_kind) {
-#if ANDROID_SMP != 0
+  if (!cu_->GetInstructionSetFeatures()->IsSmp()) {
+    return false;
+  }
   // Start off with using the last LIR as the barrier. If it is not enough, then we will generate one.
   LIR* barrier = last_lir_insn_;
 
@@ -1149,9 +1152,6 @@
   DCHECK(!barrier->flags.use_def_invalid);
   barrier->u.m.def_mask = &kEncodeAll;
   return ret;
-#else
-  return false;
-#endif
 }
 
 void ArmMir2Lir::GenNegLong(RegLocation rl_dest, RegLocation rl_src) {
diff --git a/compiler/dex/quick/arm64/int_arm64.cc b/compiler/dex/quick/arm64/int_arm64.cc
index dfdb76b..0e00698 100644
--- a/compiler/dex/quick/arm64/int_arm64.cc
+++ b/compiler/dex/quick/arm64/int_arm64.cc
@@ -16,6 +16,7 @@
 
 /* This file contains codegen for the Arm64 ISA. */
 
+#include "arch/instruction_set_features.h"
 #include "arm64_lir.h"
 #include "codegen_arm64.h"
 #include "dex/quick/mir_to_lir-inl.h"
@@ -978,7 +979,9 @@
 }
 
 bool Arm64Mir2Lir::GenMemBarrier(MemBarrierKind barrier_kind) {
-#if ANDROID_SMP != 0
+  if (!cu_->GetInstructionSetFeatures()->IsSmp()) {
+    return false;
+  }
   // Start off with using the last LIR as the barrier. If it is not enough, then we will generate one.
   LIR* barrier = last_lir_insn_;
 
@@ -1014,9 +1017,6 @@
   DCHECK(!barrier->flags.use_def_invalid);
   barrier->u.m.def_mask = &kEncodeAll;
   return ret;
-#else
-  return false;
-#endif
 }
 
 void Arm64Mir2Lir::GenIntToLong(RegLocation rl_dest, RegLocation rl_src) {
diff --git a/compiler/dex/quick/x86/target_x86.cc b/compiler/dex/quick/x86/target_x86.cc
index f5f7113..ead31b3 100755
--- a/compiler/dex/quick/x86/target_x86.cc
+++ b/compiler/dex/quick/x86/target_x86.cc
@@ -18,6 +18,7 @@
 #include <inttypes.h>
 #include <string>
 
+#include "arch/instruction_set_features.h"
 #include "backend_x86.h"
 #include "codegen_x86.h"
 #include "dex/compiler_internals.h"
@@ -594,7 +595,9 @@
 }
 
 bool X86Mir2Lir::GenMemBarrier(MemBarrierKind barrier_kind) {
-#if ANDROID_SMP != 0
+  if (!cu_->GetInstructionSetFeatures()->IsSmp()) {
+    return false;
+  }
   // Start off with using the last LIR as the barrier. If it is not enough, then we will update it.
   LIR* mem_barrier = last_lir_insn_;
 
@@ -630,9 +633,6 @@
     mem_barrier->u.m.def_mask = &kEncodeAll;
   }
   return ret;
-#else
-  return false;
-#endif
 }
 
 void X86Mir2Lir::CompilerInitializeRegAlloc() {
diff --git a/compiler/llvm/ir_builder.h b/compiler/llvm/ir_builder.h
index 03498ef..990ba02 100644
--- a/compiler/llvm/ir_builder.h
+++ b/compiler/llvm/ir_builder.h
@@ -101,10 +101,8 @@
   // Extend memory barrier
   //--------------------------------------------------------------------------
   void CreateMemoryBarrier(MemBarrierKind barrier_kind) {
-#if ANDROID_SMP
     // TODO: select atomic ordering according to given barrier kind.
     CreateFence(::llvm::SequentiallyConsistent);
-#endif
   }
 
   //--------------------------------------------------------------------------
diff --git a/compiler/utils/arm/assembler_arm32.cc b/compiler/utils/arm/assembler_arm32.cc
index 39ebf68..a1594b0 100644
--- a/compiler/utils/arm/assembler_arm32.cc
+++ b/compiler/utils/arm/assembler_arm32.cc
@@ -1513,10 +1513,8 @@
 
 
 void Arm32Assembler::dmb(DmbOptions flavor) {
-#if ANDROID_SMP != 0
   int32_t encoding = 0xf57ff05f;  // dmb
   Emit(encoding | flavor);
-#endif
 }
 
 
diff --git a/compiler/utils/arm/assembler_thumb2.cc b/compiler/utils/arm/assembler_thumb2.cc
index 3ab9b2b..a349209 100644
--- a/compiler/utils/arm/assembler_thumb2.cc
+++ b/compiler/utils/arm/assembler_thumb2.cc
@@ -2599,10 +2599,8 @@
 
 
 void Thumb2Assembler::dmb(DmbOptions flavor) {
-#if ANDROID_SMP != 0
   int32_t encoding = 0xf3bf8f50;  // dmb in T1 encoding.
   Emit32(encoding | flavor);
-#endif
 }
 
 
diff --git a/compiler/utils/arm64/assembler_arm64.cc b/compiler/utils/arm64/assembler_arm64.cc
index 02011b8..390f2ea 100644
--- a/compiler/utils/arm64/assembler_arm64.cc
+++ b/compiler/utils/arm64/assembler_arm64.cc
@@ -476,9 +476,7 @@
 
 void Arm64Assembler::MemoryBarrier(ManagedRegister m_scratch ATTRIBUTE_UNUSED) {
   // TODO: Should we check that m_scratch is IP? - see arm.
-#if ANDROID_SMP != 0
   ___ Dmb(vixl::InnerShareable, vixl::BarrierAll);
-#endif
 }
 
 void Arm64Assembler::SignExtend(ManagedRegister mreg, size_t size) {
diff --git a/compiler/utils/x86/assembler_x86.cc b/compiler/utils/x86/assembler_x86.cc
index 8ebb40e..afa4a3b 100644
--- a/compiler/utils/x86/assembler_x86.cc
+++ b/compiler/utils/x86/assembler_x86.cc
@@ -1830,9 +1830,7 @@
 }
 
 void X86Assembler::MemoryBarrier(ManagedRegister) {
-#if ANDROID_SMP != 0
   mfence();
-#endif
 }
 
 void X86Assembler::CreateHandleScopeEntry(ManagedRegister mout_reg,
diff --git a/compiler/utils/x86_64/assembler_x86_64.cc b/compiler/utils/x86_64/assembler_x86_64.cc
index 2bb2ed8..8c428f4 100644
--- a/compiler/utils/x86_64/assembler_x86_64.cc
+++ b/compiler/utils/x86_64/assembler_x86_64.cc
@@ -2371,9 +2371,7 @@
 }
 
 void X86_64Assembler::MemoryBarrier(ManagedRegister) {
-#if ANDROID_SMP != 0
   mfence();
-#endif
 }
 
 void X86_64Assembler::CreateHandleScopeEntry(ManagedRegister mout_reg,