Implement heap poisoning in ART's Optimizing compiler.
- Instrument the ARM, ARM64, x86, and x86-64 code generators.
- Note: To turn heap poisoning on in Optimizing, set the
  environment variable `ART_HEAP_POISONING` to "true"
  before compiling ART. A sketch of the scheme follows.
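
Sketch of the scheme (illustrative only: the helper names are
hypothetical, and the transform is assumed here to be two's-complement
negation, which is its own inverse, matching the per-architecture
PoisonHeapReference/UnpoisonHeapReference assembler routines):

  #include <cstdint>

  // Build-time switch derived from ART_HEAP_POISONING.
  static constexpr bool kPoisonHeapReferences = true;

  // Transform a 32-bit reference before storing it into the heap.
  uint32_t PoisonReference(uint32_t ref) {
    return kPoisonHeapReferences ? 0u - ref : ref;
  }

  // Restore a reference after loading it from the heap; negating
  // again undoes the poisoning.
  uint32_t UnpoisonReference(uint32_t ref) {
    return kPoisonHeapReferences ? 0u - ref : ref;
  }

With poisoning on, code that dereferences an in-heap reference without
unpoisoning it first hits a bogus address and faults early; that is
what the instrumentation below is meant to catch.
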
Bug: 12687968
Change-Id: Ib3120b38cf805a8a50207a314b9ccc90c8d93740
diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc
index 8bcb88b..78ac167 100644
--- a/compiler/optimizing/intrinsics_arm64.cc
+++ b/compiler/optimizing/intrinsics_arm64.cc
@@ -683,6 +683,11 @@
} else {
codegen->Load(type, trg, mem_op);
}
+
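+  // Object references may be stored poisoned in the heap; undo the
+  // transform after the load (a no-op when poisoning is compiled out).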
+ if (type == Primitive::kPrimNot) {
+ DCHECK(trg.IsW());
+ codegen->GetAssembler()->MaybeUnpoisonHeapReference(trg);
+ }
}
static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
@@ -781,22 +786,37 @@
Register base = WRegisterFrom(locations->InAt(1)); // Object pointer.
Register offset = XRegisterFrom(locations->InAt(2)); // Long offset.
Register value = RegisterFrom(locations->InAt(3), type);
+ Register source = value;
bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();
MemOperand mem_op(base.X(), offset);
- if (is_volatile || is_ordered) {
- if (use_acquire_release) {
- codegen->StoreRelease(type, value, mem_op);
- } else {
- __ Dmb(InnerShareable, BarrierAll);
- codegen->Store(type, value, mem_op);
- if (is_volatile) {
- __ Dmb(InnerShareable, BarrierReads);
- }
+ {
+ // We use a block to end the scratch scope before the write barrier, thus
+ // freeing the temporary registers so they can be used in `MarkGCCard`.
+ UseScratchRegisterScope temps(masm);
+
+ if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
+ DCHECK(value.IsW());
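+      // Poison a scratch copy rather than `value` itself, as the original
+      // (unpoisoned) reference is still needed by `MarkGCCard` below.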
+ Register temp = temps.AcquireW();
+ __ Mov(temp.W(), value.W());
+ codegen->GetAssembler()->PoisonHeapReference(temp.W());
+ source = temp;
}
- } else {
- codegen->Store(type, value, mem_op);
+
+ if (is_volatile || is_ordered) {
+ if (use_acquire_release) {
+ codegen->StoreRelease(type, source, mem_op);
+ } else {
+ __ Dmb(InnerShareable, BarrierAll);
+ codegen->Store(type, source, mem_op);
+ if (is_volatile) {
+ __ Dmb(InnerShareable, BarrierReads);
+ }
+ }
+ } else {
+ codegen->Store(type, source, mem_op);
+ }
}
if (type == Primitive::kPrimNot) {
@@ -872,6 +892,11 @@
__ Add(tmp_ptr, base.X(), Operand(offset));
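+  // The heap holds poisoned references, so poison both `expected` and
+  // `value` before the CAS loop compares and stores them.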
+ if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
+ codegen->GetAssembler()->PoisonHeapReference(expected);
+ codegen->GetAssembler()->PoisonHeapReference(value);
+ }
+
// do {
// tmp_value = [tmp_ptr] - expected;
// } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
@@ -897,6 +922,11 @@
}
__ Bind(&exit_loop);
__ Cset(out, eq);
+
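+  // Poisoning above mutated `expected` and `value` in place; undo the
+  // transform so both registers hold the original references again.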
+ if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
+ codegen->GetAssembler()->UnpoisonHeapReference(value);
+ codegen->GetAssembler()->UnpoisonHeapReference(expected);
+ }
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASInt(HInvoke* invoke) {
@@ -1173,5 +1203,9 @@
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)
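+// The UNIMPLEMENTED_INTRINSIC and `__` macros are local helpers; undefine
+// them now that all intrinsics have been emitted.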
+#undef UNIMPLEMENTED_INTRINSIC
+
+#undef __
+
} // namespace arm64
} // namespace art