Support for CONST_STRING in optimizing compiler.

Handle CONST_STRING and CONST_STRING_JUMBO in the graph builder via a
new HLoadString instruction, and implement code generation for it on
ARM, x86 and x86-64; arm64 keeps it on its unsupported list. The fast
path loads the String from the dex cache, and a slow path calls the
runtime to resolve the string when the cache entry is null.

Also let the register allocator insert intervals into `unhandled` in
sorted order, avoid saving an instruction's location at its own
safepoint, and bail out of the optimizing compiler on pathological
methods.

Change-Id: Iab8517bdadd1d15ffbe570010f093660be7c51aa
diff --git a/compiler/optimizing/builder.cc b/compiler/optimizing/builder.cc
index e4ccd96..434d9ef 100644
--- a/compiler/optimizing/builder.cc
+++ b/compiler/optimizing/builder.cc
@@ -1173,6 +1173,20 @@
       break;
     }
 
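+    // CONST_STRING and CONST_STRING_JUMBO differ only in the width of their
+    // operands; both become an HLoadString.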
+    case Instruction::CONST_STRING: {
+      current_block_->AddInstruction(new (arena_) HLoadString(instruction.VRegB_21c(), dex_offset));
+      UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction());
+      break;
+    }
+
+    case Instruction::CONST_STRING_JUMBO: {
+      current_block_->AddInstruction(new (arena_) HLoadString(instruction.VRegB_31c(), dex_offset));
+      UpdateLocal(instruction.VRegA_31c(), current_block_->GetLastInstruction());
+      break;
+    }
+
     default:
       return false;
   }
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc
index 0f14436..9297475 100644
--- a/compiler/optimizing/code_generator_arm.cc
+++ b/compiler/optimizing/code_generator_arm.cc
@@ -196,6 +196,37 @@
   DISALLOW_COPY_AND_ASSIGN(ClinitCheckSlowPathARM);
 };
 
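+// Slow path for HLoadString: calls the runtime to resolve the string and
+// moves the result into the instruction's output register.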
+class LoadStringSlowPathARM : public SlowPathCodeARM {
+ public:
+  explicit LoadStringSlowPathARM(HLoadString* instruction) : instruction_(instruction) {}
+
+  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
+    LocationSummary* locations = instruction_->GetLocations();
+    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
+
+    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
+    __ Bind(GetEntryLabel());
+    codegen->SaveLiveRegisters(locations);
+
+    InvokeRuntimeCallingConvention calling_convention;
+    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(0));
+    __ LoadImmediate(calling_convention.GetRegisterAt(1), instruction_->GetStringIndex());
+    arm_codegen->InvokeRuntime(
+        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
+    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
+
+    codegen->RestoreLiveRegisters(locations);
+    __ b(GetExitLabel());
+  }
+
+ private:
+  HLoadString* const instruction_;
+
+  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM);
+};
+
 #undef __
 #define __ reinterpret_cast<ArmAssembler*>(GetAssembler())->
 
@@ -2261,5 +2292,27 @@
   }
 }
 
+void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
+  LocationSummary* locations =
+      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
+  locations->SetOut(Location::RequiresRegister());
+}
+
+void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) {
+  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
+  codegen_->AddSlowPath(slow_path);
+
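+  // Fast path: load the String from the ArtMethod's dex cache. A null entry
+  // means the string is not yet resolved, so take the slow path.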
+  Register out = load->GetLocations()->Out().As<Register>();
+  codegen_->LoadCurrentMethod(out);
+  __ LoadFromOffset(
+      kLoadWord, out, out, mirror::ArtMethod::DexCacheStringsOffset().Int32Value());
+  __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
+  __ cmp(out, ShifterOperand(0));
+  __ b(slow_path->GetEntryLabel(), EQ);
+  __ Bind(slow_path->GetExitLabel());
+}
+
 }  // namespace arm
 }  // namespace art
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 5d504c6..ec9af73 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -539,6 +539,7 @@
   M(Div)                                                   \
   M(FloatConstant)                                         \
   M(LoadClass)                                             \
+  M(LoadString)                                            \
   M(Neg)                                                   \
   M(NewArray)                                              \
   M(ParallelMove)                                          \
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 2d6d14f..d41d5a0 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -183,6 +183,37 @@
   DISALLOW_COPY_AND_ASSIGN(ClinitCheckSlowPathX86);
 };
 
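+// Slow path for HLoadString: calls the runtime to resolve the string and
+// moves the result into the instruction's output register.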
+class LoadStringSlowPathX86 : public SlowPathCodeX86 {
+ public:
+  explicit LoadStringSlowPathX86(HLoadString* instruction) : instruction_(instruction) {}
+
+  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
+    LocationSummary* locations = instruction_->GetLocations();
+    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
+
+    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
+    __ Bind(GetEntryLabel());
+    codegen->SaveLiveRegisters(locations);
+
+    InvokeRuntimeCallingConvention calling_convention;
+    x86_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(0));
+    __ movl(calling_convention.GetRegisterAt(1), Immediate(instruction_->GetStringIndex()));
+    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pResolveString)));
+    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
+    x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
+    codegen->RestoreLiveRegisters(locations);
+
+    __ jmp(GetExitLabel());
+  }
+
+ private:
+  HLoadString* const instruction_;
+
+  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86);
+};
+
 #undef __
 #define __ reinterpret_cast<X86Assembler*>(GetAssembler())->
 
@@ -2321,5 +2350,26 @@
   }
 }
 
+void LocationsBuilderX86::VisitLoadString(HLoadString* load) {
+  LocationSummary* locations =
+      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
+  locations->SetOut(Location::RequiresRegister());
+}
+
+void InstructionCodeGeneratorX86::VisitLoadString(HLoadString* load) {
+  SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86(load);
+  codegen_->AddSlowPath(slow_path);
+
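+  // Fast path: load the String from the ArtMethod's dex cache. A null entry
+  // means the string is not yet resolved, so take the slow path.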
+  Register out = load->GetLocations()->Out().As<Register>();
+  codegen_->LoadCurrentMethod(out);
+  __ movl(out, Address(out, mirror::ArtMethod::DexCacheStringsOffset().Int32Value()));
+  __ movl(out, Address(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
+  __ testl(out, out);
+  __ j(kEqual, slow_path->GetEntryLabel());
+  __ Bind(slow_path->GetExitLabel());
+}
+
 }  // namespace x86
 }  // namespace art
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index e8d34e3..bda3520 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -196,6 +196,38 @@
   DISALLOW_COPY_AND_ASSIGN(ClinitCheckSlowPathX86_64);
 };
 
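+// Slow path for HLoadString: calls the runtime to resolve the string and
+// moves the result into the instruction's output register.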
+class LoadStringSlowPathX86_64 : public SlowPathCodeX86_64 {
+ public:
+  explicit LoadStringSlowPathX86_64(HLoadString* instruction) : instruction_(instruction) {}
+
+  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
+    LocationSummary* locations = instruction_->GetLocations();
+    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
+
+    CodeGeneratorX86_64* x64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
+    __ Bind(GetEntryLabel());
+    codegen->SaveLiveRegisters(locations);
+
+    InvokeRuntimeCallingConvention calling_convention;
+    x64_codegen->LoadCurrentMethod(CpuRegister(calling_convention.GetRegisterAt(0)));
+    __ movl(CpuRegister(calling_convention.GetRegisterAt(1)),
+            Immediate(instruction_->GetStringIndex()));
+    __ gs()->call(Address::Absolute(
+        QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pResolveString), true));
+    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
+    x64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
+    codegen->RestoreLiveRegisters(locations);
+    __ jmp(GetExitLabel());
+  }
+
+ private:
+  HLoadString* const instruction_;
+
+  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64);
+};
+
 #undef __
 #define __ reinterpret_cast<X86_64Assembler*>(GetAssembler())->
 
@@ -2270,5 +2300,26 @@
   }
 }
 
+void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
+  LocationSummary* locations =
+      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
+  locations->SetOut(Location::RequiresRegister());
+}
+
+void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) {
+  SlowPathCodeX86_64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86_64(load);
+  codegen_->AddSlowPath(slow_path);
+
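+  // Fast path: load the String from the ArtMethod's dex cache. A null entry
+  // means the string is not yet resolved, so take the slow path.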
+  CpuRegister out = load->GetLocations()->Out().As<CpuRegister>();
+  codegen_->LoadCurrentMethod(out);
+  __ movl(out, Address(out, mirror::ArtMethod::DexCacheStringsOffset().Int32Value()));
+  __ movl(out, Address(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
+  __ testl(out, out);
+  __ j(kEqual, slow_path->GetEntryLabel());
+  __ Bind(slow_path->GetExitLabel());
+}
+
 }  // namespace x86_64
 }  // namespace art
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 86c36b8..33bfe19 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -489,10 +489,11 @@
   M(IntConstant, Constant)                                              \
   M(InvokeStatic, Invoke)                                               \
   M(InvokeVirtual, Invoke)                                              \
-  M(LoadClass, Instruction)                                             \
   M(LessThan, Condition)                                                \
   M(LessThanOrEqual, Condition)                                         \
+  M(LoadClass, Instruction)                                             \
   M(LoadLocal, Instruction)                                             \
+  M(LoadString, Instruction)                                            \
   M(Local, Instruction)                                                 \
   M(LongConstant, Constant)                                             \
   M(Mul, BinaryOperation)                                               \
@@ -2022,7 +2023,8 @@
 };
 
 // TODO: Make this class handle the case the load is null (dex cache
-// is null).
+// is null). This will be required when using it for things other than
+// an initialization check.
 /**
  * Instruction to load a Class object.
  */
@@ -2064,6 +2066,37 @@
   DISALLOW_COPY_AND_ASSIGN(HLoadClass);
 };
 
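+/**
+ * Instruction to load a String object.
+ */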
+class HLoadString : public HExpression<0> {
+ public:
+  HLoadString(uint32_t string_index, uint32_t dex_pc)
+      : HExpression(Primitive::kPrimNot, SideEffects::None()),
+        string_index_(string_index),
+        dex_pc_(dex_pc) {}
+
+  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
+    return other->AsLoadString()->string_index_ == string_index_;
+  }
+
+  size_t ComputeHashCode() const OVERRIDE { return string_index_; }
+
+  uint32_t GetDexPc() const { return dex_pc_; }
+  uint32_t GetStringIndex() const { return string_index_; }
+
+  // TODO: Can we deopt or debug when we resolve a string?
+  bool NeedsEnvironment() const OVERRIDE { return false; }
+
+  DECLARE_INSTRUCTION(LoadString);
+
+ private:
+  const uint32_t string_index_;
+  const uint32_t dex_pc_;
+
+  DISALLOW_COPY_AND_ASSIGN(HLoadString);
+};
+
 // TODO: Pass this check to HInvokeStatic nodes.
 /**
  * Performs an initialization check on its Class object input.
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index 5350dcb..d3fe1c4 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -226,6 +226,11 @@
     return nullptr;
   }
 
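+  // Bail out early on methods flagged as pathological (e.g. too large to compile).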
+  if (Compiler::IsPathologicalCase(*code_item, method_idx, dex_file)) {
+    return nullptr;
+  }
+
   DexCompilationUnit dex_compilation_unit(
     nullptr, class_loader, art::Runtime::Current()->GetClassLinker(), dex_file, code_item,
     class_def_idx, method_idx, access_flags,
diff --git a/compiler/optimizing/register_allocator.cc b/compiler/optimizing/register_allocator.cc
index f95c4a4..497e9b9 100644
--- a/compiler/optimizing/register_allocator.cc
+++ b/compiler/optimizing/register_allocator.cc
@@ -266,15 +266,17 @@
     size_t first_register_use = current->FirstRegisterUse();
     if (first_register_use != kNoLifetime) {
       LiveInterval* split = Split(current, first_register_use - 1);
-      // Don't add direclty to `unhandled`, it needs to be sorted and the start
+      // Don't add directly to `unhandled`, it needs to be sorted and the start
       // of this new interval might be after intervals already in the list.
       AddSorted(&unhandled, split);
     } else {
       // Nothing to do, we won't allocate a register for this value.
     }
   } else {
-    DCHECK(unhandled.IsEmpty() || current->StartsBeforeOrAt(unhandled.Peek()));
-    unhandled.Add(current);
+    // Don't add directly to `unhandled`: temp or safepoint intervals
+    // for this instruction may already have been added, and those may
+    // need to be processed first.
+    AddSorted(&unhandled, current);
   }
 }
 
@@ -973,7 +975,14 @@
       HInstruction* safepoint = safepoints_.Get(i);
       size_t position = safepoint->GetLifetimePosition();
       LocationSummary* locations = safepoint->GetLocations();
-      if (!current->Covers(position)) continue;
+      if (!current->Covers(position)) {
+        continue;
+      }
+      if (interval->GetStart() == position) {
+        // The safepoint is for this instruction, so the location of the instruction
+        // does not need to be saved.
+        continue;
+      }
 
       if ((current->GetType() == Primitive::kPrimNot) && current->GetParent()->HasSpillSlot()) {
         locations->SetStackBit(current->GetParent()->GetSpillSlot() / kVRegSize);