ART: Reference.getReferent intrinsic for arm and arm64
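
Emitting the call for an intrinsified invoke is no longer tied to a
slow path, so GenerateStaticOrDirectCall() is split in two: a new
helper, GenerateCalleeMethodStaticOrDirectCall(), materializes the
callee ArtMethod* according to the method load kind and returns its
location, and GenerateStaticOrDirectCall() then emits the actual call.
A caller that wants its own call sequence can use the helper alone; as
a rough sketch (the surrounding codegen/invoke/temp names are assumed
context, not part of this patch, and the load/branch pair mirrors the
existing kCallArtMethod case):

    // Sketch only: load the callee ArtMethod* into `temp`, then
    // branch through its quick-compiled entry point.
    Location callee_method =
        codegen->GenerateCalleeMethodStaticOrDirectCall(invoke, temp);
    // LR = callee_method->entry_point_from_quick_compiled_code_
    __ LoadFromOffset(kLoadWord, LR, callee_method.AsRegister<Register>(),
                      ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                          kArmPointerSize).Int32Value());
    __ blx(LR);  // Call the entry point; return address goes in LR.
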
Test: m test-art-host
Test: m test-art-target
Test: export ART_HEAP_POISONING=true; m test-art-host
Test: export ART_HEAP_POISONING=true; m test-art-target
Bug: 32535355
Change-Id: Ie63317689dd9e03a24e701c30411f8014970173a
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc
index 8a7f6d3..e469b1d 100644
--- a/compiler/optimizing/code_generator_arm.cc
+++ b/compiler/optimizing/code_generator_arm.cc
@@ -7166,8 +7166,7 @@
   // save one load. However, since this is just an intrinsic slow path we prefer this
   // simple and more robust approach rather than trying to determine if that's the case.
   SlowPathCode* slow_path = GetCurrentSlowPath();
-  DCHECK(slow_path != nullptr);  // For intrinsified invokes the call is emitted on the slow path.
-  if (slow_path->IsCoreRegisterSaved(location.AsRegister<Register>())) {
+  if (slow_path != nullptr && slow_path->IsCoreRegisterSaved(location.AsRegister<Register>())) {
     int stack_offset = slow_path->GetStackOffsetOfCoreRegister(location.AsRegister<Register>());
     __ LoadFromOffset(kLoadWord, temp, SP, stack_offset);
     return temp;
@@ -7175,7 +7174,8 @@
   return location.AsRegister<Register>();
 }
 
-void CodeGeneratorARM::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
+Location CodeGeneratorARM::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
+                                                                  Location temp) {
   Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
   switch (invoke->GetMethodLoadKind()) {
     case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
@@ -7224,6 +7224,11 @@
       break;
     }
   }
+  return callee_method;
+}
+
+void CodeGeneratorARM::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
+  Location callee_method = GenerateCalleeMethodStaticOrDirectCall(invoke, temp);
 
   switch (invoke->GetCodePtrLocation()) {
     case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf: