Merge "Enable ART_BUILD_HOST_STATIC for check builds."
diff --git a/build/Android.gtest.mk b/build/Android.gtest.mk
index 4fc184e..9f6dc9a 100644
--- a/build/Android.gtest.mk
+++ b/build/Android.gtest.mk
@@ -150,6 +150,7 @@
runtime/class_linker_test.cc \
runtime/dex_file_test.cc \
runtime/dex_file_verifier_test.cc \
+ runtime/dex_instruction_test.cc \
runtime/dex_instruction_visitor_test.cc \
runtime/dex_method_iterator_test.cc \
runtime/entrypoints/math_entrypoints_test.cc \
diff --git a/compiler/common_compiler_test.h b/compiler/common_compiler_test.h
index b828fcf..d215662 100644
--- a/compiler/common_compiler_test.h
+++ b/compiler/common_compiler_test.h
@@ -108,6 +108,13 @@
std::list<std::vector<uint8_t>> header_code_and_maps_chunks_;
};
+// TODO: When heap reference poisoning works with all compilers in use, get rid of this.
+#define TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING_WITH_QUICK() \
+ if (kPoisonHeapReferences && GetCompilerKind() == Compiler::kQuick) { \
+ printf("WARNING: TEST DISABLED FOR HEAP REFERENCE POISONING WITH QUICK\n"); \
+ return; \
+ }
+
// TODO: When non-PIC works with all compilers in use, get rid of this.
#define TEST_DISABLED_FOR_NON_PIC_COMPILING_WITH_OPTIMIZING() \
if (GetCompilerKind() == Compiler::kOptimizing) { \
diff --git a/compiler/dex/quick/quick_compiler.cc b/compiler/dex/quick/quick_compiler.cc
index 28c485a..39496a4 100644
--- a/compiler/dex/quick/quick_compiler.cc
+++ b/compiler/dex/quick/quick_compiler.cc
@@ -653,6 +653,12 @@
uint32_t method_idx,
jobject class_loader,
const DexFile& dex_file) const {
+ if (kPoisonHeapReferences) {
+ VLOG(compiler) << "Skipping method : " << PrettyMethod(method_idx, dex_file)
+ << " Reason = Quick does not support heap poisoning.";
+ return nullptr;
+ }
+
// TODO: check method fingerprint here to determine appropriate backend type. Until then, use
// build default.
CompilerDriver* driver = GetCompilerDriver();
diff --git a/compiler/driver/compiler_driver_test.cc b/compiler/driver/compiler_driver_test.cc
index ba03f5a..b358f4f 100644
--- a/compiler/driver/compiler_driver_test.cc
+++ b/compiler/driver/compiler_driver_test.cc
@@ -146,7 +146,7 @@
}
TEST_F(CompilerDriverTest, AbstractMethodErrorStub) {
- TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
+ TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING_WITH_QUICK();
jobject class_loader;
{
ScopedObjectAccess soa(Thread::Current());
@@ -192,6 +192,7 @@
};
TEST_F(CompilerDriverMethodsTest, Selection) {
+ TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING_WITH_QUICK();
Thread* self = Thread::Current();
jobject class_loader;
{
diff --git a/compiler/jni/quick/jni_compiler.cc b/compiler/jni/quick/jni_compiler.cc
index 85fd696..953dfcb 100644
--- a/compiler/jni/quick/jni_compiler.cc
+++ b/compiler/jni/quick/jni_compiler.cc
@@ -138,7 +138,7 @@
FrameOffset handle_scope_offset = main_jni_conv->CurrentParamHandleScopeEntryOffset();
// Check handle scope offset is within frame
CHECK_LT(handle_scope_offset.Uint32Value(), frame_size);
- // Note this LoadRef() doesn't need heap poisoning since its from the ArtMethod.
+ // Note this LoadRef() doesn't need heap unpoisoning since it's from the ArtMethod.
// Note this LoadRef() does not include read barrier. It will be handled below.
__ LoadRef(main_jni_conv->InterproceduralScratchRegister(),
mr_conv->MethodRegister(), ArtMethod::DeclaringClassOffset(), false);
diff --git a/compiler/optimizing/builder.cc b/compiler/optimizing/builder.cc
index bde2c70..54155db 100644
--- a/compiler/optimizing/builder.cc
+++ b/compiler/optimizing/builder.cc
@@ -306,6 +306,22 @@
}
}
+void HGraphBuilder::SplitTryBoundaryEdge(HBasicBlock* predecessor,
+ HBasicBlock* successor,
+ HTryBoundary::BoundaryKind kind,
+ const DexFile::CodeItem& code_item,
+ const DexFile::TryItem& try_item) {
+ // Split the edge with a single TryBoundary instruction.
+ HTryBoundary* try_boundary = new (arena_) HTryBoundary(kind);
+ HBasicBlock* try_entry_block = graph_->SplitEdge(predecessor, successor);
+ try_entry_block->AddInstruction(try_boundary);
+
+ // Link the TryBoundary to the handlers of `try_item`.
+ for (CatchHandlerIterator it(code_item, try_item); it.HasNext(); it.Next()) {
+ try_boundary->AddExceptionHandler(FindBlockStartingAt(it.GetHandlerAddress()));
+ }
+}
+
void HGraphBuilder::InsertTryBoundaryBlocks(const DexFile::CodeItem& code_item) {
if (code_item.tries_size_ == 0) {
return;
@@ -326,47 +342,54 @@
continue;
}
- // Find predecessors which are not covered by the same TryItem range. Such
- // edges enter the try block and will have a TryBoundary inserted.
- for (size_t i = 0; i < try_block->GetPredecessors().Size(); ++i) {
- HBasicBlock* predecessor = try_block->GetPredecessors().Get(i);
- HTryBoundary* try_boundary = nullptr;
- if (predecessor->IsSingleTryBoundary()) {
- try_boundary = predecessor->GetLastInstruction()->AsTryBoundary();
- if (try_boundary->GetNormalFlowSuccessor() == try_block
- && try_block->IsFirstIndexOfPredecessor(predecessor, i)) {
+ if (try_block->IsCatchBlock()) {
+ // Catch blocks are always considered an entry point into the TryItem in
+ // order to avoid splitting exceptional edges (they might not have been
+ // created yet). We separate the move-exception (if present) from the
+ // rest of the block and insert a TryBoundary after it, creating a
+ // landing pad for the exceptional edges.
+ HInstruction* first_insn = try_block->GetFirstInstruction();
+ HInstruction* split_position = nullptr;
+ if (first_insn->IsLoadException()) {
+ // Catch block starts with a LoadException. Split the block after the
+ // StoreLocal that must come after the load.
+ DCHECK(first_insn->GetNext()->IsStoreLocal());
+ split_position = first_insn->GetNext()->GetNext();
+ } else {
+ // Catch block does not obtain the exception. Split at the beginning
+ // to create an empty catch block.
+ split_position = first_insn;
+ }
+ DCHECK(split_position != nullptr);
+ HBasicBlock* catch_block = try_block;
+ try_block = catch_block->SplitBefore(split_position);
+ SplitTryBoundaryEdge(catch_block, try_block, HTryBoundary::kEntry, code_item, *try_item);
+ } else {
+ // For non-catch blocks, find predecessors which are not covered by the
+ // same TryItem range. Such edges enter the try block and will have
+ // a TryBoundary inserted.
+ for (size_t i = 0; i < try_block->GetPredecessors().Size(); ++i) {
+ HBasicBlock* predecessor = try_block->GetPredecessors().Get(i);
+ if (predecessor->IsSingleTryBoundary()) {
// The edge was already split because of an exit from a neighbouring
- // TryItem and `predecessor` is the block with a TryBoundary created
- // between the two original blocks. We do not split the edge again.
- DCHECK(!IsBlockInPcRange(predecessor->GetSinglePredecessor(), try_start, try_end));
- DCHECK(try_boundary->IsTryExit());
- DCHECK(!try_boundary->IsTryEntry());
- try_boundary->SetIsTryEntry();
+ // TryItem. We split it again and insert an entry point.
+ if (kIsDebugBuild) {
+ HTryBoundary* last_insn = predecessor->GetLastInstruction()->AsTryBoundary();
+ DCHECK(!last_insn->IsEntry());
+ DCHECK_EQ(last_insn->GetNormalFlowSuccessor(), try_block);
+ DCHECK(try_block->IsFirstIndexOfPredecessor(predecessor, i));
+ DCHECK(!IsBlockInPcRange(predecessor->GetSinglePredecessor(), try_start, try_end));
+ }
+ } else if (!IsBlockInPcRange(predecessor, try_start, try_end)) {
+ // This is an entry point into the TryItem and the edge has not been
+ // split yet. That means that `predecessor` is not in a TryItem, or
+ // it is in a different TryItem and we happened to iterate over this
+ // block first. We split the edge and insert an entry point.
} else {
- // This is an edge between a previously created TryBoundary and its
- // handler. We skip it because it is exceptional flow.
- DCHECK(try_block->IsCatchBlock());
- DCHECK(try_boundary->HasExceptionHandler(try_block));
+ // Not an edge on the boundary of the try block.
continue;
}
- } else if (!IsBlockInPcRange(predecessor, try_start, try_end)) {
- // This is an entry point into the TryItem and the edge has not been
- // split yet. That means that either `predecessor` is not in a TryItem,
- // or it is in a different TryItem and we happened to iterate over
- // this block first. We split the edge and `predecessor` may add its
- // own exception handlers later.
- try_boundary = new (arena_) HTryBoundary(/* is_entry */ true, /* is_exit */ false);
- HBasicBlock* try_entry_block = graph_->SplitEdge(predecessor, try_block);
- try_entry_block->AddInstruction(try_boundary);
- } else {
- // Not an edge on the boundary of the try block.
- continue;
- }
- DCHECK(try_boundary != nullptr);
-
- // Link the TryBoundary block to the handlers of this TryItem.
- for (CatchHandlerIterator it(code_item, *try_item); it.HasNext(); it.Next()) {
- try_boundary->AddExceptionHandler(FindBlockStartingAt(it.GetHandlerAddress()));
+ SplitTryBoundaryEdge(predecessor, try_block, HTryBoundary::kEntry, code_item, *try_item);
}
}
@@ -374,45 +397,37 @@
// edges exit the try block and will have a TryBoundary inserted.
for (size_t i = 0; i < try_block->GetSuccessors().Size(); ++i) {
HBasicBlock* successor = try_block->GetSuccessors().Get(i);
- HTryBoundary* try_boundary = nullptr;
- if (successor->IsSingleTryBoundary()) {
+ if (successor->IsCatchBlock()) {
+ // A catch block is always considered an entry point into its TryItem.
+ // We therefore assume this is an exit point, regardless of whether
+ // the catch block is in a different TryItem or not.
+ } else if (successor->IsSingleTryBoundary()) {
// The edge was already split because of an entry into a neighbouring
- // TryItem. We do not split the edge again.
- try_boundary = successor->GetLastInstruction()->AsTryBoundary();
- DCHECK_EQ(try_block, successor->GetSinglePredecessor());
- DCHECK(try_boundary->IsTryEntry());
- DCHECK(!try_boundary->IsTryExit());
- DCHECK(!IsBlockInPcRange(try_boundary->GetNormalFlowSuccessor(), try_start, try_end));
- try_boundary->SetIsTryExit();
+ // TryItem. We split it again and insert an exit.
+ if (kIsDebugBuild) {
+ HTryBoundary* last_insn = successor->GetLastInstruction()->AsTryBoundary();
+ DCHECK_EQ(try_block, successor->GetSinglePredecessor());
+ DCHECK(last_insn->IsEntry());
+ DCHECK(!IsBlockInPcRange(last_insn->GetNormalFlowSuccessor(), try_start, try_end));
+ }
} else if (!IsBlockInPcRange(successor, try_start, try_end)) {
// This is an exit out of the TryItem and the edge has not been split
// yet. That means that either `successor` is not in a TryItem, or it
// is in a different TryItem and we happened to iterate over this
- // block first. We split the edge and `successor` may add its own
- // exception handlers later.
+ // block first. We split the edge and insert an exit.
HInstruction* last_instruction = try_block->GetLastInstruction();
if (last_instruction->IsReturn() || last_instruction->IsReturnVoid()) {
DCHECK_EQ(successor, exit_block_);
// Control flow exits the try block with a Return(Void). Because
// splitting the edge would invalidate the invariant that Return
// always jumps to Exit, we move the Return outside the try block.
- HBasicBlock* return_block = try_block->SplitBefore(last_instruction);
- graph_->AddBlock(return_block);
- successor = return_block;
+ successor = try_block->SplitBefore(last_instruction);
}
- try_boundary = new (arena_) HTryBoundary(/* is_entry */ false, /* is_exit */ true);
- HBasicBlock* try_exit_block = graph_->SplitEdge(try_block, successor);
- try_exit_block->AddInstruction(try_boundary);
} else {
// Not an edge on the boundary of the try block.
continue;
}
- DCHECK(try_boundary != nullptr);
-
- // Link the TryBoundary block to the handlers of this TryItem.
- for (CatchHandlerIterator it(code_item, *try_item); it.HasNext(); it.Next()) {
- try_boundary->AddExceptionHandler(FindBlockStartingAt(it.GetHandlerAddress()));
- }
+ SplitTryBoundaryEdge(try_block, successor, HTryBoundary::kExit, code_item, *try_item);
}
}
}
diff --git a/compiler/optimizing/builder.h b/compiler/optimizing/builder.h
index 58d85e9..cae762b 100644
--- a/compiler/optimizing/builder.h
+++ b/compiler/optimizing/builder.h
@@ -97,10 +97,26 @@
void MaybeUpdateCurrentBlock(size_t dex_pc);
HBasicBlock* FindBlockStartingAt(int32_t dex_pc) const;
HBasicBlock* FindOrCreateBlockStartingAt(int32_t dex_pc);
+
+ // Returns whether the dex_pc of `block` lies within the given range.
bool IsBlockInPcRange(HBasicBlock* block, uint32_t dex_pc_start, uint32_t dex_pc_end);
+
+ // Adds new blocks to `branch_targets_` starting at the limits of TryItems and
+ // their exception handlers.
void CreateBlocksForTryCatch(const DexFile::CodeItem& code_item);
+
+ // Splits edges which cross the boundaries of TryItems, inserts TryBoundary
+ // instructions and links them to the corresponding catch blocks.
void InsertTryBoundaryBlocks(const DexFile::CodeItem& code_item);
+ // Splits a single edge, inserting a TryBoundary of given `kind` and linking
+ // it to exception handlers of `try_item`.
+ void SplitTryBoundaryEdge(HBasicBlock* predecessor,
+ HBasicBlock* successor,
+ HTryBoundary::BoundaryKind kind,
+ const DexFile::CodeItem& code_item,
+ const DexFile::TryItem& try_item);
+
void InitializeLocals(uint16_t count);
HLocal* GetLocalAt(int register_index) const;
void UpdateLocal(int register_index, HInstruction* instruction) const;
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc
index ff9373a..e3683ef 100644
--- a/compiler/optimizing/code_generator_arm.cc
+++ b/compiler/optimizing/code_generator_arm.cc
@@ -332,8 +332,6 @@
};
#undef __
-
-#undef __
#define __ down_cast<ArmAssembler*>(GetAssembler())->
inline Condition ARMCondition(IfCondition cond) {
@@ -1030,9 +1028,8 @@
if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
// Condition has been materialized, compare the output to 0
DCHECK(instruction->GetLocations()->InAt(0).IsRegister());
- __ cmp(instruction->GetLocations()->InAt(0).AsRegister<Register>(),
- ShifterOperand(0));
- __ b(true_target, NE);
+ __ CompareAndBranchIfNonZero(instruction->GetLocations()->InAt(0).AsRegister<Register>(),
+ true_target);
} else {
// Condition has not been materialized, use its inputs as the
// comparison and its condition as the branch condition.
@@ -1384,6 +1381,7 @@
DCHECK(receiver.IsRegister());
__ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
codegen_->MaybeRecordImplicitNullCheck(invoke);
+ __ MaybeUnpoisonHeapReference(temp);
// temp = temp->GetMethodAt(method_offset);
uint32_t entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(
kArmWordSize).Int32Value();
@@ -1423,6 +1421,7 @@
__ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
}
codegen_->MaybeRecordImplicitNullCheck(invoke);
+ __ MaybeUnpoisonHeapReference(temp);
// temp = temp->GetImtEntryAt(method_offset);
uint32_t entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(
kArmWordSize).Int32Value();
@@ -2591,8 +2590,7 @@
switch (instruction->GetType()) {
case Primitive::kPrimInt: {
if (value.IsRegister()) {
- __ cmp(value.AsRegister<Register>(), ShifterOperand(0));
- __ b(slow_path->GetEntryLabel(), EQ);
+ __ CompareAndBranchIfZero(value.AsRegister<Register>(), slow_path->GetEntryLabel());
} else {
DCHECK(value.IsConstant()) << value;
if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
@@ -2780,6 +2778,8 @@
void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) {
InvokeRuntimeCallingConvention calling_convention;
__ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
+ // Note: if heap poisoning is enabled, the entry point takes care
+ // of poisoning the reference.
codegen_->InvokeRuntime(GetThreadOffset<kArmWordSize>(instruction->GetEntrypoint()).Int32Value(),
instruction,
instruction->GetDexPc(),
@@ -2799,6 +2799,8 @@
void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) {
InvokeRuntimeCallingConvention calling_convention;
__ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
+ // Note: if heap poisoning is enabled, the entry point takes care
+ // of poisoning the reference.
codegen_->InvokeRuntime(GetThreadOffset<kArmWordSize>(instruction->GetEntrypoint()).Int32Value(),
instruction,
instruction->GetDexPc(),
@@ -3011,8 +3013,7 @@
__ ldrexd(temp1, temp2, addr);
codegen_->MaybeRecordImplicitNullCheck(instruction);
__ strexd(temp1, value_lo, value_hi, addr);
- __ cmp(temp1, ShifterOperand(0));
- __ b(&fail, NE);
+ __ CompareAndBranchIfNonZero(temp1, &fail);
}
void LocationsBuilderARM::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
@@ -3033,10 +3034,12 @@
bool generate_volatile = field_info.IsVolatile()
&& is_wide
&& !codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
+ bool needs_write_barrier =
+ CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
// Temporary registers for the write barrier.
// TODO: consider renaming StoreNeedsWriteBarrier to StoreNeedsGCMark.
- if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
- locations->AddTemp(Location::RequiresRegister());
+ if (needs_write_barrier) {
+ locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
locations->AddTemp(Location::RequiresRegister());
} else if (generate_volatile) {
// Arm encoding have some additional constraints for ldrexd/strexd:
@@ -3069,6 +3072,8 @@
bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();
Primitive::Type field_type = field_info.GetFieldType();
uint32_t offset = field_info.GetFieldOffset().Uint32Value();
+ bool needs_write_barrier =
+ CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
if (is_volatile) {
GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
@@ -3089,7 +3094,18 @@
case Primitive::kPrimInt:
case Primitive::kPrimNot: {
- __ StoreToOffset(kStoreWord, value.AsRegister<Register>(), base, offset);
+ if (kPoisonHeapReferences && needs_write_barrier) {
+ // Note that in the case where `value` is a null reference,
+ // we do not enter this block, as a null reference does not
+ // need poisoning.
+ DCHECK_EQ(field_type, Primitive::kPrimNot);
+ Register temp = locations->GetTemp(0).AsRegister<Register>();
+ __ Mov(temp, value.AsRegister<Register>());
+ __ PoisonHeapReference(temp);
+ __ StoreToOffset(kStoreWord, temp, base, offset);
+ } else {
+ __ StoreToOffset(kStoreWord, value.AsRegister<Register>(), base, offset);
+ }
break;
}
@@ -3268,6 +3284,10 @@
if (is_volatile) {
GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
}
+
+ if (field_type == Primitive::kPrimNot) {
+ __ MaybeUnpoisonHeapReference(out.AsRegister<Register>());
+ }
}
void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
@@ -3328,8 +3348,7 @@
LocationSummary* locations = instruction->GetLocations();
Location obj = locations->InAt(0);
- __ cmp(obj.AsRegister<Register>(), ShifterOperand(0));
- __ b(slow_path->GetEntryLabel(), EQ);
+ __ CompareAndBranchIfZero(obj.AsRegister<Register>(), slow_path->GetEntryLabel());
}
void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) {
@@ -3356,8 +3375,9 @@
LocationSummary* locations = instruction->GetLocations();
Register obj = locations->InAt(0).AsRegister<Register>();
Location index = locations->InAt(1);
+ Primitive::Type type = instruction->GetType();
- switch (instruction->GetType()) {
+ switch (type) {
case Primitive::kPrimBoolean: {
uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
Register out = locations->Out().AsRegister<Register>();
@@ -3474,10 +3494,15 @@
}
case Primitive::kPrimVoid:
- LOG(FATAL) << "Unreachable type " << instruction->GetType();
+ LOG(FATAL) << "Unreachable type " << type;
UNREACHABLE();
}
codegen_->MaybeRecordImplicitNullCheck(instruction);
+
+ if (type == Primitive::kPrimNot) {
+ Register out = locations->Out().AsRegister<Register>();
+ __ MaybeUnpoisonHeapReference(out);
+ }
}
void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
@@ -3505,7 +3530,7 @@
if (needs_write_barrier) {
// Temporary registers for the write barrier.
- locations->AddTemp(Location::RequiresRegister());
+ locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
locations->AddTemp(Location::RequiresRegister());
}
}
@@ -3556,14 +3581,25 @@
if (!needs_runtime_call) {
uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Register value = locations->InAt(2).AsRegister<Register>();
+ Register source = value;
+ if (kPoisonHeapReferences && needs_write_barrier) {
+ // Note that in the case where `value` is a null reference,
+ // we do not enter this block, as a null reference does not
+ // need poisoning.
+ DCHECK_EQ(value_type, Primitive::kPrimNot);
+ Register temp = locations->GetTemp(0).AsRegister<Register>();
+ __ Mov(temp, value);
+ __ PoisonHeapReference(temp);
+ source = temp;
+ }
if (index.IsConstant()) {
size_t offset =
(index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
- __ StoreToOffset(kStoreWord, value, obj, offset);
+ __ StoreToOffset(kStoreWord, source, obj, offset);
} else {
DCHECK(index.IsRegister()) << index;
__ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
- __ StoreToOffset(kStoreWord, value, IP, data_offset);
+ __ StoreToOffset(kStoreWord, source, IP, data_offset);
}
codegen_->MaybeRecordImplicitNullCheck(instruction);
if (needs_write_barrier) {
@@ -3574,6 +3610,8 @@
}
} else {
DCHECK_EQ(value_type, Primitive::kPrimNot);
+ // Note: if heap poisoning is enabled, pAputObject takes care
+ // of poisoning the reference.
codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
instruction,
instruction->GetDexPc(),
@@ -3746,13 +3784,11 @@
__ LoadFromOffset(
kLoadUnsignedHalfword, IP, TR, Thread::ThreadFlagsOffset<kArmWordSize>().Int32Value());
- __ cmp(IP, ShifterOperand(0));
- // TODO: Figure out the branch offsets and use cbz/cbnz.
if (successor == nullptr) {
- __ b(slow_path->GetEntryLabel(), NE);
+ __ CompareAndBranchIfNonZero(IP, slow_path->GetEntryLabel());
__ Bind(slow_path->GetReturnLabel());
} else {
- __ b(codegen_->GetLabelOf(successor), EQ);
+ __ CompareAndBranchIfZero(IP, codegen_->GetLabelOf(successor));
__ b(slow_path->GetEntryLabel());
}
}
@@ -4000,12 +4036,12 @@
current_method,
ArtMethod::DexCacheResolvedTypesOffset().Int32Value());
__ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));
+ __ MaybeUnpoisonHeapReference(out);
SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
codegen_->AddSlowPath(slow_path);
- __ cmp(out, ShifterOperand(0));
- __ b(slow_path->GetEntryLabel(), EQ);
+ __ CompareAndBranchIfZero(out, slow_path->GetEntryLabel());
if (cls->MustGenerateClinitCheck()) {
GenerateClassInitializationCheck(slow_path, out);
} else {
@@ -4060,9 +4096,10 @@
__ LoadFromOffset(
kLoadWord, out, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
__ LoadFromOffset(kLoadWord, out, out, mirror::Class::DexCacheStringsOffset().Int32Value());
+ __ MaybeUnpoisonHeapReference(out);
__ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
- __ cmp(out, ShifterOperand(0));
- __ b(slow_path->GetEntryLabel(), EQ);
+ __ MaybeUnpoisonHeapReference(out);
+ __ CompareAndBranchIfZero(out, slow_path->GetEntryLabel());
__ Bind(slow_path->GetExitLabel());
}
@@ -4119,6 +4156,7 @@
}
// Compare the class of `obj` with `cls`.
__ LoadFromOffset(kLoadWord, out, obj, class_offset);
+ __ MaybeUnpoisonHeapReference(out);
__ cmp(out, ShifterOperand(cls));
if (instruction->IsClassFinal()) {
// Classes must be equal for the instanceof to succeed.
@@ -4172,7 +4210,10 @@
}
// Compare the class of `obj` with `cls`.
__ LoadFromOffset(kLoadWord, temp, obj, class_offset);
+ __ MaybeUnpoisonHeapReference(temp);
__ cmp(temp, ShifterOperand(cls));
+ // The checkcast succeeds if the classes are equal (fast path).
+ // Otherwise, we need to go into the slow path to check the types.
__ b(slow_path->GetEntryLabel(), NE);
__ Bind(slow_path->GetExitLabel());
}
@@ -4324,5 +4365,8 @@
LOG(FATAL) << "Unreachable";
}
+#undef __
+#undef QUICK_ENTRY_POINT
+
} // namespace arm
} // namespace art
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 9b7124d..a9a95d3 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -1250,6 +1250,7 @@
void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
const FieldInfo& field_info) {
DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
+ Primitive::Type field_type = field_info.GetFieldType();
BlockPoolsScope block_pools(GetVIXLAssembler());
MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());
@@ -1260,15 +1261,19 @@
// NB: LoadAcquire will record the pc info if needed.
codegen_->LoadAcquire(instruction, OutputCPURegister(instruction), field);
} else {
- codegen_->Load(field_info.GetFieldType(), OutputCPURegister(instruction), field);
+ codegen_->Load(field_type, OutputCPURegister(instruction), field);
codegen_->MaybeRecordImplicitNullCheck(instruction);
// For IRIW sequential consistency kLoadAny is not sufficient.
GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
}
} else {
- codegen_->Load(field_info.GetFieldType(), OutputCPURegister(instruction), field);
+ codegen_->Load(field_type, OutputCPURegister(instruction), field);
codegen_->MaybeRecordImplicitNullCheck(instruction);
}
+
+ if (field_type == Primitive::kPrimNot) {
+ GetAssembler()->MaybeUnpoisonHeapReference(OutputCPURegister(instruction).W());
+ }
}
void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
@@ -1290,23 +1295,38 @@
Register obj = InputRegisterAt(instruction, 0);
CPURegister value = InputCPURegisterAt(instruction, 1);
+ CPURegister source = value;
Offset offset = field_info.GetFieldOffset();
Primitive::Type field_type = field_info.GetFieldType();
bool use_acquire_release = codegen_->GetInstructionSetFeatures().PreferAcquireRelease();
- if (field_info.IsVolatile()) {
- if (use_acquire_release) {
- codegen_->StoreRelease(field_type, value, HeapOperand(obj, offset));
- codegen_->MaybeRecordImplicitNullCheck(instruction);
- } else {
- GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
- codegen_->Store(field_type, value, HeapOperand(obj, offset));
- codegen_->MaybeRecordImplicitNullCheck(instruction);
- GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
+ {
+ // We use a block to end the scratch scope before the write barrier, thus
+ // freeing the temporary registers so they can be used in `MarkGCCard`.
+ UseScratchRegisterScope temps(GetVIXLAssembler());
+
+ if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
+ DCHECK(value.IsW());
+ Register temp = temps.AcquireW();
+ __ Mov(temp, value.W());
+ GetAssembler()->PoisonHeapReference(temp.W());
+ source = temp;
}
- } else {
- codegen_->Store(field_type, value, HeapOperand(obj, offset));
- codegen_->MaybeRecordImplicitNullCheck(instruction);
+
+ if (field_info.IsVolatile()) {
+ if (use_acquire_release) {
+ codegen_->StoreRelease(field_type, source, HeapOperand(obj, offset));
+ codegen_->MaybeRecordImplicitNullCheck(instruction);
+ } else {
+ GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
+ codegen_->Store(field_type, source, HeapOperand(obj, offset));
+ codegen_->MaybeRecordImplicitNullCheck(instruction);
+ GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
+ }
+ } else {
+ codegen_->Store(field_type, source, HeapOperand(obj, offset));
+ codegen_->MaybeRecordImplicitNullCheck(instruction);
+ }
}
if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
@@ -1464,6 +1484,10 @@
codegen_->Load(type, OutputCPURegister(instruction), source);
codegen_->MaybeRecordImplicitNullCheck(instruction);
+
+ if (type == Primitive::kPrimNot) {
+ GetAssembler()->MaybeUnpoisonHeapReference(OutputCPURegister(instruction).W());
+ }
}
void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
@@ -1506,12 +1530,15 @@
bool needs_runtime_call = locations->WillCall();
if (needs_runtime_call) {
+ // Note: if heap poisoning is enabled, pAputObject takes care
+ // of poisoning the reference.
codegen_->InvokeRuntime(
QUICK_ENTRY_POINT(pAputObject), instruction, instruction->GetDexPc(), nullptr);
CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
} else {
Register obj = InputRegisterAt(instruction, 0);
CPURegister value = InputCPURegisterAt(instruction, 2);
+ CPURegister source = value;
Location index = locations->InAt(1);
size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
MemOperand destination = HeapOperand(obj);
@@ -1522,6 +1549,14 @@
// freeing the temporary registers so they can be used in `MarkGCCard`.
UseScratchRegisterScope temps(masm);
+ if (kPoisonHeapReferences && value_type == Primitive::kPrimNot) {
+ DCHECK(value.IsW());
+ Register temp = temps.AcquireW();
+ __ Mov(temp, value.W());
+ GetAssembler()->PoisonHeapReference(temp.W());
+ source = temp;
+ }
+
if (index.IsConstant()) {
offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
destination = HeapOperand(obj, offset);
@@ -1532,7 +1567,7 @@
destination = HeapOperand(temp, offset);
}
- codegen_->Store(value_type, value, destination);
+ codegen_->Store(value_type, source, destination);
codegen_->MaybeRecordImplicitNullCheck(instruction);
}
if (CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue())) {
@@ -1585,7 +1620,10 @@
}
// Compare the class of `obj` with `cls`.
__ Ldr(obj_cls, HeapOperand(obj, mirror::Object::ClassOffset()));
+ GetAssembler()->MaybeUnpoisonHeapReference(obj_cls.W());
__ Cmp(obj_cls, cls);
+ // The checkcast succeeds if the classes are equal (fast path).
+ // Otherwise, we need to go into the slow path to check the types.
__ B(ne, slow_path->GetEntryLabel());
__ Bind(slow_path->GetExitLabel());
}
@@ -2152,6 +2190,7 @@
// Compare the class of `obj` with `cls`.
__ Ldr(out, HeapOperand(obj, mirror::Object::ClassOffset()));
+ GetAssembler()->MaybeUnpoisonHeapReference(out.W());
__ Cmp(out, cls);
if (instruction->IsClassFinal()) {
// Classes must be equal for the instanceof to succeed.
@@ -2225,6 +2264,7 @@
__ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
}
codegen_->MaybeRecordImplicitNullCheck(invoke);
+ GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
// temp = temp->GetImtEntryAt(method_offset);
__ Ldr(temp, MemOperand(temp, method_offset));
// lr = temp->GetEntryPoint();
@@ -2350,6 +2390,7 @@
DCHECK(receiver.IsRegister());
__ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
codegen_->MaybeRecordImplicitNullCheck(invoke);
+ GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
// temp = temp->GetMethodAt(method_offset);
__ Ldr(temp, MemOperand(temp, method_offset));
// lr = temp->GetEntryPoint();
@@ -2379,6 +2420,7 @@
DCHECK(cls->CanCallRuntime());
__ Ldr(out, MemOperand(current_method, ArtMethod::DexCacheResolvedTypesOffset().Int32Value()));
__ Ldr(out, HeapOperand(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));
+ GetAssembler()->MaybeUnpoisonHeapReference(out.W());
SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
@@ -2428,7 +2470,9 @@
Register current_method = InputRegisterAt(load, 0);
__ Ldr(out, MemOperand(current_method, ArtMethod::DeclaringClassOffset().Int32Value()));
__ Ldr(out, HeapOperand(out, mirror::Class::DexCacheStringsOffset()));
+ GetAssembler()->MaybeUnpoisonHeapReference(out.W());
__ Ldr(out, HeapOperand(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
+ GetAssembler()->MaybeUnpoisonHeapReference(out.W());
__ Cbz(out, slow_path->GetEntryLabel());
__ Bind(slow_path->GetExitLabel());
}
@@ -2563,6 +2607,8 @@
Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
DCHECK(type_index.Is(w0));
__ Mov(type_index, instruction->GetTypeIndex());
+ // Note: if heap poisoning is enabled, the entry point takes cares
+ // of poisoning the reference.
codegen_->InvokeRuntime(
GetThreadOffset<kArm64WordSize>(instruction->GetEntrypoint()).Int32Value(),
instruction,
@@ -2586,6 +2632,8 @@
Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
DCHECK(type_index.Is(w0));
__ Mov(type_index, instruction->GetTypeIndex());
+ // Note: if heap poisoning is enabled, the entry point takes cares
+ // of poisoning the reference.
codegen_->InvokeRuntime(
GetThreadOffset<kArm64WordSize>(instruction->GetEntrypoint()).Int32Value(),
instruction,
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 6c82fe9..262b234 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -1315,9 +1315,11 @@
LocationSummary* locations = invoke->GetLocations();
Location receiver = locations->InAt(0);
uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
+ // temp = object->GetClass();
DCHECK(receiver.IsRegister());
__ movl(temp, Address(receiver.AsRegister<Register>(), class_offset));
codegen_->MaybeRecordImplicitNullCheck(invoke);
+ __ MaybeUnpoisonHeapReference(temp);
// temp = temp->GetMethodAt(method_offset);
__ movl(temp, Address(temp, method_offset));
// call temp->GetEntryPoint();
@@ -1354,7 +1356,8 @@
} else {
__ movl(temp, Address(receiver.AsRegister<Register>(), class_offset));
}
- codegen_->MaybeRecordImplicitNullCheck(invoke);
+ codegen_->MaybeRecordImplicitNullCheck(invoke);
+ __ MaybeUnpoisonHeapReference(temp);
// temp = temp->GetImtEntryAt(method_offset);
__ movl(temp, Address(temp, method_offset));
// call temp->GetEntryPoint();
@@ -3001,6 +3004,8 @@
void InstructionCodeGeneratorX86::VisitNewInstance(HNewInstance* instruction) {
InvokeRuntimeCallingConvention calling_convention;
__ movl(calling_convention.GetRegisterAt(0), Immediate(instruction->GetTypeIndex()));
+ // Note: if heap poisoning is enabled, the entry point takes cares
+ // of poisoning the reference.
__ fs()->call(Address::Absolute(GetThreadOffset<kX86WordSize>(instruction->GetEntrypoint())));
codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
@@ -3021,6 +3026,8 @@
InvokeRuntimeCallingConvention calling_convention;
__ movl(calling_convention.GetRegisterAt(0), Immediate(instruction->GetTypeIndex()));
+ // Note: if heap poisoning is enabled, the entry point takes cares
+ // of poisoning the reference.
__ fs()->call(Address::Absolute(GetThreadOffset<kX86WordSize>(instruction->GetEntrypoint())));
codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
@@ -3397,6 +3404,10 @@
if (is_volatile) {
GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
}
+
+ if (field_type == Primitive::kPrimNot) {
+ __ MaybeUnpoisonHeapReference(out.AsRegister<Register>());
+ }
}
void LocationsBuilderX86::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
@@ -3420,9 +3431,9 @@
} else {
locations->SetInAt(1, Location::RequiresRegister());
}
- // Temporary registers for the write barrier.
if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
- locations->AddTemp(Location::RequiresRegister());
+ // Temporary registers for the write barrier.
+ locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
// Ensure the card is in a byte register.
locations->AddTemp(Location::RegisterLocation(ECX));
} else if (is_volatile && (field_type == Primitive::kPrimLong)) {
@@ -3447,6 +3458,8 @@
bool is_volatile = field_info.IsVolatile();
Primitive::Type field_type = field_info.GetFieldType();
uint32_t offset = field_info.GetFieldOffset().Uint32Value();
+ bool needs_write_barrier =
+ CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
if (is_volatile) {
GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
@@ -3467,7 +3480,18 @@
case Primitive::kPrimInt:
case Primitive::kPrimNot: {
- __ movl(Address(base, offset), value.AsRegister<Register>());
+ if (kPoisonHeapReferences && needs_write_barrier) {
+ // Note that in the case where `value` is a null reference,
+ // we do not enter this block, as the reference does not
+ // need poisoning.
+ DCHECK_EQ(field_type, Primitive::kPrimNot);
+ Register temp = locations->GetTemp(0).AsRegister<Register>();
+ __ movl(temp, value.AsRegister<Register>());
+ __ PoisonHeapReference(temp);
+ __ movl(Address(base, offset), temp);
+ } else {
+ __ movl(Address(base, offset), value.AsRegister<Register>());
+ }
break;
}
@@ -3508,7 +3532,7 @@
codegen_->MaybeRecordImplicitNullCheck(instruction);
}
- if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
+ if (needs_write_barrier) {
Register temp = locations->GetTemp(0).AsRegister<Register>();
Register card = locations->GetTemp(1).AsRegister<Register>();
codegen_->MarkGCCard(temp, card, base, value.AsRegister<Register>(), value_can_be_null);
@@ -3737,6 +3761,11 @@
if (type != Primitive::kPrimLong) {
codegen_->MaybeRecordImplicitNullCheck(instruction);
}
+
+ if (type == Primitive::kPrimNot) {
+ Register out = locations->Out().AsRegister<Register>();
+ __ MaybeUnpoisonHeapReference(out);
+ }
}
void LocationsBuilderX86::VisitArraySet(HArraySet* instruction) {
@@ -3776,9 +3805,9 @@
} else {
locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
}
- // Temporary registers for the write barrier.
if (needs_write_barrier) {
- locations->AddTemp(Location::RequiresRegister());
+ // Temporary registers for the write barrier.
+ locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
// Ensure the card is in a byte register.
locations->AddTemp(Location::RegisterLocation(ECX));
}
@@ -3852,21 +3881,43 @@
size_t offset =
(index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
if (value.IsRegister()) {
- __ movl(Address(obj, offset), value.AsRegister<Register>());
+ if (kPoisonHeapReferences && value_type == Primitive::kPrimNot) {
+ Register temp = locations->GetTemp(0).AsRegister<Register>();
+ __ movl(temp, value.AsRegister<Register>());
+ __ PoisonHeapReference(temp);
+ __ movl(Address(obj, offset), temp);
+ } else {
+ __ movl(Address(obj, offset), value.AsRegister<Register>());
+ }
} else {
DCHECK(value.IsConstant()) << value;
- __ movl(Address(obj, offset),
- Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
+ int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
+ // `value_type == Primitive::kPrimNot` implies `v == 0`.
+ DCHECK((value_type != Primitive::kPrimNot) || (v == 0));
+ // Note: if heap poisoning is enabled, no need to poison
+ // (negate) `v` if it is a reference, as it would be null.
+ __ movl(Address(obj, offset), Immediate(v));
}
} else {
DCHECK(index.IsRegister()) << index;
if (value.IsRegister()) {
- __ movl(Address(obj, index.AsRegister<Register>(), TIMES_4, data_offset),
- value.AsRegister<Register>());
+ if (kPoisonHeapReferences && value_type == Primitive::kPrimNot) {
+ Register temp = locations->GetTemp(0).AsRegister<Register>();
+ __ movl(temp, value.AsRegister<Register>());
+ __ PoisonHeapReference(temp);
+ __ movl(Address(obj, index.AsRegister<Register>(), TIMES_4, data_offset), temp);
+ } else {
+ __ movl(Address(obj, index.AsRegister<Register>(), TIMES_4, data_offset),
+ value.AsRegister<Register>());
+ }
} else {
DCHECK(value.IsConstant()) << value;
- __ movl(Address(obj, index.AsRegister<Register>(), TIMES_4, data_offset),
- Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
+ int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
+ // `value_type == Primitive::kPrimNot` implies `v == 0`.
+ DCHECK((value_type != Primitive::kPrimNot) || (v == 0));
+ // Note: if heap poisoning is enabled, no need to poison
+ // (negate) `v` if it is a reference, as it would be null.
+ __ movl(Address(obj, index.AsRegister<Register>(), TIMES_4, data_offset), Immediate(v));
}
}
codegen_->MaybeRecordImplicitNullCheck(instruction);
@@ -3880,6 +3931,8 @@
} else {
DCHECK_EQ(value_type, Primitive::kPrimNot);
DCHECK(!codegen_->IsLeafMethod());
+ // Note: if heap poisoning is enabled, pAputObject takes cares
+ // of poisoning the reference.
__ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAputObject)));
codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
}
@@ -4343,6 +4396,7 @@
__ movl(out, Address(
current_method, ArtMethod::DexCacheResolvedTypesOffset().Int32Value()));
__ movl(out, Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));
+ __ MaybeUnpoisonHeapReference(out);
SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86(
cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
@@ -4400,7 +4454,9 @@
Register current_method = locations->InAt(0).AsRegister<Register>();
__ movl(out, Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()));
__ movl(out, Address(out, mirror::Class::DexCacheStringsOffset().Int32Value()));
+ __ MaybeUnpoisonHeapReference(out);
__ movl(out, Address(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
+ __ MaybeUnpoisonHeapReference(out);
__ testl(out, out);
__ j(kEqual, slow_path->GetEntryLabel());
__ Bind(slow_path->GetExitLabel());
@@ -4455,8 +4511,9 @@
__ testl(obj, obj);
__ j(kEqual, &zero);
}
- __ movl(out, Address(obj, class_offset));
// Compare the class of `obj` with `cls`.
+ __ movl(out, Address(obj, class_offset));
+ __ MaybeUnpoisonHeapReference(out);
if (cls.IsRegister()) {
__ cmpl(out, cls.AsRegister<Register>());
} else {
@@ -4514,16 +4571,17 @@
__ testl(obj, obj);
__ j(kEqual, slow_path->GetExitLabel());
}
-
- __ movl(temp, Address(obj, class_offset));
// Compare the class of `obj` with `cls`.
+ __ movl(temp, Address(obj, class_offset));
+ __ MaybeUnpoisonHeapReference(temp);
if (cls.IsRegister()) {
__ cmpl(temp, cls.AsRegister<Register>());
} else {
DCHECK(cls.IsStackSlot()) << cls;
__ cmpl(temp, Address(ESP, cls.GetStackIndex()));
}
-
+ // The checkcast succeeds if the classes are equal (fast path).
+ // Otherwise, we need to go into the slow path to check the types.
__ j(kNotEqual, slow_path->GetEntryLabel());
__ Bind(slow_path->GetExitLabel());
}
@@ -4687,5 +4745,7 @@
LOG(FATAL) << "Unreachable";
}
+#undef __
+
} // namespace x86
} // namespace art
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index 22f5d96..c9d19c8 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -213,7 +213,7 @@
__ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(cls_->GetTypeIndex()));
__ gs()->call(Address::Absolute((do_clinit_
? QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pInitializeStaticStorage)
- : QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pInitializeType)) , true));
+ : QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pInitializeType)), true));
RecordPcInfo(codegen, at_, dex_pc_);
Location out = locations->Out();
@@ -1429,6 +1429,7 @@
DCHECK(receiver.IsRegister());
__ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
codegen_->MaybeRecordImplicitNullCheck(invoke);
+ __ MaybeUnpoisonHeapReference(temp);
// temp = temp->GetMethodAt(method_offset);
__ movq(temp, Address(temp, method_offset));
// call temp->GetEntryPoint();
@@ -1466,6 +1467,7 @@
__ movl(temp, Address(receiver.AsRegister<CpuRegister>(), class_offset));
}
codegen_->MaybeRecordImplicitNullCheck(invoke);
+ __ MaybeUnpoisonHeapReference(temp);
// temp = temp->GetImtEntryAt(method_offset);
__ movq(temp, Address(temp, method_offset));
// call temp->GetEntryPoint();
@@ -3060,6 +3062,8 @@
InvokeRuntimeCallingConvention calling_convention;
codegen_->Load64BitValue(CpuRegister(calling_convention.GetRegisterAt(0)),
instruction->GetTypeIndex());
+ // Note: if heap poisoning is enabled, the entry point takes cares
+ // of poisoning the reference.
__ gs()->call(
Address::Absolute(GetThreadOffset<kX86_64WordSize>(instruction->GetEntrypoint()), true));
@@ -3082,6 +3086,8 @@
codegen_->Load64BitValue(CpuRegister(calling_convention.GetRegisterAt(0)),
instruction->GetTypeIndex());
+ // Note: if heap poisoning is enabled, the entry point takes cares
+ // of poisoning the reference.
__ gs()->call(
Address::Absolute(GetThreadOffset<kX86_64WordSize>(instruction->GetEntrypoint()), true));
@@ -3270,6 +3276,10 @@
if (is_volatile) {
GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
}
+
+ if (field_type == Primitive::kPrimNot) {
+ __ MaybeUnpoisonHeapReference(out.AsRegister<CpuRegister>());
+ }
}
void LocationsBuilderX86_64::HandleFieldSet(HInstruction* instruction,
@@ -3278,8 +3288,9 @@
LocationSummary* locations =
new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
+ Primitive::Type field_type = field_info.GetFieldType();
bool needs_write_barrier =
- CodeGenerator::StoreNeedsWriteBarrier(field_info.GetFieldType(), instruction->InputAt(1));
+ CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
locations->SetInAt(0, Location::RequiresRegister());
if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
@@ -3289,7 +3300,10 @@
}
if (needs_write_barrier) {
// Temporary registers for the write barrier.
+ locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
locations->AddTemp(Location::RequiresRegister());
+ } else if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
+ // Temporary register for the reference poisoning.
locations->AddTemp(Location::RequiresRegister());
}
}
@@ -3337,9 +3351,20 @@
case Primitive::kPrimNot: {
if (value.IsConstant()) {
int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
- __ movw(Address(base, offset), Immediate(v));
+ // `field_type == Primitive::kPrimNot` implies `v == 0`.
+ DCHECK((field_type != Primitive::kPrimNot) || (v == 0));
+ // Note: if heap poisoning is enabled, no need to poison
+ // (negate) `v` if it is a reference, as it would be null.
+ __ movl(Address(base, offset), Immediate(v));
} else {
- __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
+ if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
+ CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
+ __ movl(temp, value.AsRegister<CpuRegister>());
+ __ PoisonHeapReference(temp);
+ __ movl(Address(base, offset), temp);
+ } else {
+ __ movl(Address(base, offset), value.AsRegister<CpuRegister>());
+ }
}
break;
}
@@ -3483,8 +3508,9 @@
LocationSummary* locations = instruction->GetLocations();
CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
Location index = locations->InAt(1);
+ Primitive::Type type = instruction->GetType();
- switch (instruction->GetType()) {
+ switch (type) {
case Primitive::kPrimBoolean: {
uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
CpuRegister out = locations->Out().AsRegister<CpuRegister>();
@@ -3585,10 +3611,15 @@
}
case Primitive::kPrimVoid:
- LOG(FATAL) << "Unreachable type " << instruction->GetType();
+ LOG(FATAL) << "Unreachable type " << type;
UNREACHABLE();
}
codegen_->MaybeRecordImplicitNullCheck(instruction);
+
+ if (type == Primitive::kPrimNot) {
+ CpuRegister out = locations->Out().AsRegister<CpuRegister>();
+ __ MaybeUnpoisonHeapReference(out);
+ }
}
void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
@@ -3620,7 +3651,7 @@
if (needs_write_barrier) {
// Temporary registers for the write barrier.
- locations->AddTemp(Location::RequiresRegister());
+ locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
locations->AddTemp(Location::RequiresRegister());
}
}
@@ -3696,20 +3727,42 @@
size_t offset =
(index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
if (value.IsRegister()) {
- __ movl(Address(obj, offset), value.AsRegister<CpuRegister>());
+ if (kPoisonHeapReferences && value_type == Primitive::kPrimNot) {
+ CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
+ __ movl(temp, value.AsRegister<CpuRegister>());
+ __ PoisonHeapReference(temp);
+ __ movl(Address(obj, offset), temp);
+ } else {
+ __ movl(Address(obj, offset), value.AsRegister<CpuRegister>());
+ }
} else {
DCHECK(value.IsConstant()) << value;
int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
+ // `value_type == Primitive::kPrimNot` implies `v == 0`.
+ DCHECK((value_type != Primitive::kPrimNot) || (v == 0));
+ // Note: if heap poisoning is enabled, no need to poison
+ // (negate) `v` if it is a reference, as it would be null.
__ movl(Address(obj, offset), Immediate(v));
}
} else {
DCHECK(index.IsRegister()) << index;
if (value.IsRegister()) {
- __ movl(Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset),
- value.AsRegister<CpuRegister>());
+ if (kPoisonHeapReferences && value_type == Primitive::kPrimNot) {
+ CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
+ __ movl(temp, value.AsRegister<CpuRegister>());
+ __ PoisonHeapReference(temp);
+ __ movl(Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset), temp);
+ } else {
+ __ movl(Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset),
+ value.AsRegister<CpuRegister>());
+ }
} else {
DCHECK(value.IsConstant()) << value;
int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
+ // `value_type == Primitive::kPrimNot` implies `v == 0`.
+ DCHECK((value_type != Primitive::kPrimNot) || (v == 0));
+ // Note: if heap poisoning is enabled, no need to poison
+ // (negate) `v` if it is a reference, as it would be null.
__ movl(Address(obj, index.AsRegister<CpuRegister>(), TIMES_4, data_offset),
Immediate(v));
}
@@ -3724,6 +3777,8 @@
}
} else {
DCHECK_EQ(value_type, Primitive::kPrimNot);
+ // Note: if heap poisoning is enabled, pAputObject takes cares
+ // of poisoning the reference.
__ gs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pAputObject),
true));
DCHECK(!codegen_->IsLeafMethod());
@@ -3876,7 +3931,7 @@
Thread::CardTableOffset<kX86_64WordSize>().Int32Value(), true));
__ movq(temp, object);
__ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
- __ movb(Address(temp, card, TIMES_1, 0), card);
+ __ movb(Address(temp, card, TIMES_1, 0), card);
if (value_can_be_null) {
__ Bind(&is_null);
}
@@ -4187,6 +4242,8 @@
__ movl(out, Address(
current_method, ArtMethod::DexCacheResolvedTypesOffset().Int32Value()));
__ movl(out, Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));
+ __ MaybeUnpoisonHeapReference(out);
+
SlowPathCodeX86_64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86_64(
cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
codegen_->AddSlowPath(slow_path);
@@ -4234,7 +4291,9 @@
CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
__ movl(out, Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()));
__ movl(out, Address(out, mirror::Class::DexCacheStringsOffset().Int32Value()));
+ __ MaybeUnpoisonHeapReference(out);
__ movl(out, Address(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
+ __ MaybeUnpoisonHeapReference(out);
__ testl(out, out);
__ j(kEqual, slow_path->GetEntryLabel());
__ Bind(slow_path->GetExitLabel());
@@ -4293,6 +4352,7 @@
}
// Compare the class of `obj` with `cls`.
__ movl(out, Address(obj, class_offset));
+ __ MaybeUnpoisonHeapReference(out);
if (cls.IsRegister()) {
__ cmpl(out, cls.AsRegister<CpuRegister>());
} else {
@@ -4351,13 +4411,15 @@
}
// Compare the class of `obj` with `cls`.
__ movl(temp, Address(obj, class_offset));
+ __ MaybeUnpoisonHeapReference(temp);
if (cls.IsRegister()) {
__ cmpl(temp, cls.AsRegister<CpuRegister>());
} else {
DCHECK(cls.IsStackSlot()) << cls;
__ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
}
- // Classes must be equal for the checkcast to succeed.
+ // The checkcast succeeds if the classes are equal (fast path).
+ // Otherwise, we need to go into the slow path to check the types.
__ j(kNotEqual, slow_path->GetEntryLabel());
__ Bind(slow_path->GetExitLabel());
}
@@ -4576,5 +4638,7 @@
return Address::RIP(fixup);
}
+#undef __
+
} // namespace x86_64
} // namespace art
diff --git a/compiler/optimizing/graph_visualizer.cc b/compiler/optimizing/graph_visualizer.cc
index c41574c..504c141 100644
--- a/compiler/optimizing/graph_visualizer.cc
+++ b/compiler/optimizing/graph_visualizer.cc
@@ -152,7 +152,7 @@
/**
* HGraph visitor to generate a file suitable for the c1visualizer tool and IRHydra.
*/
-class HGraphVisualizerPrinter : public HGraphVisitor {
+class HGraphVisualizerPrinter : public HGraphDelegateVisitor {
public:
HGraphVisualizerPrinter(HGraph* graph,
std::ostream& output,
@@ -160,7 +160,7 @@
bool is_after_pass,
const CodeGenerator& codegen,
const DisassemblyInformation* disasm_info = nullptr)
- : HGraphVisitor(graph),
+ : HGraphDelegateVisitor(graph),
output_(output),
pass_name_(pass_name),
is_after_pass_(is_after_pass),
@@ -372,20 +372,21 @@
<< instance_of->MustDoNullCheck() << std::noboolalpha;
}
- void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) OVERRIDE {
+ void VisitInvoke(HInvoke* invoke) OVERRIDE {
StartAttributeStream("dex_file_index") << invoke->GetDexMethodIndex();
+ StartAttributeStream("method_name") << PrettyMethod(
+ invoke->GetDexMethodIndex(), GetGraph()->GetDexFile(), /* with_signature */ false);
+ }
+
+ void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) OVERRIDE {
+ VisitInvoke(invoke);
StartAttributeStream("recursive") << std::boolalpha
<< invoke->IsRecursive()
<< std::noboolalpha;
}
void VisitTryBoundary(HTryBoundary* try_boundary) OVERRIDE {
- StartAttributeStream("is_entry") << std::boolalpha
- << try_boundary->IsTryEntry()
- << std::noboolalpha;
- StartAttributeStream("is_exit") << std::boolalpha
- << try_boundary->IsTryExit()
- << std::noboolalpha;
+ StartAttributeStream("kind") << (try_boundary->IsEntry() ? "entry" : "exit");
}
bool IsPass(const char* name) {
diff --git a/compiler/optimizing/instruction_simplifier.cc b/compiler/optimizing/instruction_simplifier.cc
index e375f7b..62f90c2 100644
--- a/compiler/optimizing/instruction_simplifier.cc
+++ b/compiler/optimizing/instruction_simplifier.cc
@@ -398,6 +398,10 @@
}
}
+ if (value->IsNullConstant()) {
+ instruction->ClearNeedsTypeCheck();
+ }
+
if (!value->CanBeNull()) {
instruction->ClearValueCanBeNull();
}
diff --git a/compiler/optimizing/intrinsics_arm.cc b/compiler/optimizing/intrinsics_arm.cc
index 71fadfb..b4dbf75 100644
--- a/compiler/optimizing/intrinsics_arm.cc
+++ b/compiler/optimizing/intrinsics_arm.cc
@@ -510,6 +510,11 @@
if (is_volatile) {
__ dmb(ISH);
}
+
+ if (type == Primitive::kPrimNot) {
+ Register trg = locations->Out().AsRegister<Register>();
+ __ MaybeUnpoisonHeapReference(trg);
+ }
}
static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
@@ -649,8 +654,15 @@
__ strd(value_lo, Address(IP));
}
} else {
- value = locations->InAt(3).AsRegister<Register>();
- __ str(value, Address(base, offset));
+ value = locations->InAt(3).AsRegister<Register>();
+ Register source = value;
+ if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
+ Register temp = locations->GetTemp(0).AsRegister<Register>();
+ __ Mov(temp, value);
+ __ PoisonHeapReference(temp);
+ source = temp;
+ }
+ __ str(source, Address(base, offset));
}
if (is_volatile) {
@@ -738,6 +750,11 @@
__ add(tmp_ptr, base, ShifterOperand(offset));
+ if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
+ codegen->GetAssembler()->PoisonHeapReference(expected_lo);
+ codegen->GetAssembler()->PoisonHeapReference(value_lo);
+ }
+
// do {
// tmp = [r_ptr] - expected;
// } while (tmp == 0 && failure([r_ptr] <- r_new_value));
@@ -761,6 +778,11 @@
__ rsbs(out, tmp_lo, ShifterOperand(1));
__ it(CC);
__ mov(out, ShifterOperand(0), CC);
+
+ if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
+ codegen->GetAssembler()->UnpoisonHeapReference(value_lo);
+ codegen->GetAssembler()->UnpoisonHeapReference(expected_lo);
+ }
}
void IntrinsicLocationsBuilderARM::VisitUnsafeCASInt(HInvoke* invoke) {
@@ -1047,5 +1069,9 @@
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)
+#undef UNIMPLEMENTED_INTRINSIC
+
+#undef __
+
} // namespace arm
} // namespace art
diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc
index 8bcb88b..78ac167 100644
--- a/compiler/optimizing/intrinsics_arm64.cc
+++ b/compiler/optimizing/intrinsics_arm64.cc
@@ -683,6 +683,11 @@
} else {
codegen->Load(type, trg, mem_op);
}
+
+ if (type == Primitive::kPrimNot) {
+ DCHECK(trg.IsW());
+ codegen->GetAssembler()->MaybeUnpoisonHeapReference(trg);
+ }
}
static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
@@ -781,22 +786,37 @@
Register base = WRegisterFrom(locations->InAt(1)); // Object pointer.
Register offset = XRegisterFrom(locations->InAt(2)); // Long offset.
Register value = RegisterFrom(locations->InAt(3), type);
+ Register source = value;
bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();
MemOperand mem_op(base.X(), offset);
- if (is_volatile || is_ordered) {
- if (use_acquire_release) {
- codegen->StoreRelease(type, value, mem_op);
- } else {
- __ Dmb(InnerShareable, BarrierAll);
- codegen->Store(type, value, mem_op);
- if (is_volatile) {
- __ Dmb(InnerShareable, BarrierReads);
- }
+ {
+ // We use a block to end the scratch scope before the write barrier, thus
+ // freeing the temporary registers so they can be used in `MarkGCCard`.
+ UseScratchRegisterScope temps(masm);
+
+ if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
+ DCHECK(value.IsW());
+ Register temp = temps.AcquireW();
+ __ Mov(temp.W(), value.W());
+ codegen->GetAssembler()->PoisonHeapReference(temp.W());
+ source = temp;
}
- } else {
- codegen->Store(type, value, mem_op);
+
+ if (is_volatile || is_ordered) {
+ if (use_acquire_release) {
+ codegen->StoreRelease(type, source, mem_op);
+ } else {
+ __ Dmb(InnerShareable, BarrierAll);
+ codegen->Store(type, source, mem_op);
+ if (is_volatile) {
+ __ Dmb(InnerShareable, BarrierReads);
+ }
+ }
+ } else {
+ codegen->Store(type, source, mem_op);
+ }
}
if (type == Primitive::kPrimNot) {
@@ -872,6 +892,11 @@
__ Add(tmp_ptr, base.X(), Operand(offset));
+ if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
+ codegen->GetAssembler()->PoisonHeapReference(expected);
+ codegen->GetAssembler()->PoisonHeapReference(value);
+ }
+
// do {
// tmp_value = [tmp_ptr] - expected;
// } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
@@ -897,6 +922,11 @@
}
__ Bind(&exit_loop);
__ Cset(out, eq);
+
+ if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
+ codegen->GetAssembler()->UnpoisonHeapReference(value);
+ codegen->GetAssembler()->UnpoisonHeapReference(expected);
+ }
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASInt(HInvoke* invoke) {
@@ -1173,5 +1203,9 @@
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)
+#undef UNIMPLEMENTED_INTRINSIC
+
+#undef __
+
} // namespace arm64
} // namespace art
diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc
index b04cc5c..0d6ca09 100644
--- a/compiler/optimizing/intrinsics_x86.cc
+++ b/compiler/optimizing/intrinsics_x86.cc
@@ -1335,9 +1335,14 @@
switch (type) {
case Primitive::kPrimInt:
- case Primitive::kPrimNot:
- __ movl(output.AsRegister<Register>(), Address(base, offset, ScaleFactor::TIMES_1, 0));
+ case Primitive::kPrimNot: {
+ Register output_reg = output.AsRegister<Register>();
+ __ movl(output_reg, Address(base, offset, ScaleFactor::TIMES_1, 0));
+ if (type == Primitive::kPrimNot) {
+ __ MaybeUnpoisonHeapReference(output_reg);
+ }
break;
+ }
case Primitive::kPrimLong: {
Register output_lo = output.AsRegisterPairLow<Register>();
@@ -1436,7 +1441,7 @@
locations->SetInAt(3, Location::RequiresRegister());
if (type == Primitive::kPrimNot) {
// Need temp registers for card-marking.
- locations->AddTemp(Location::RequiresRegister());
+ locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
// Ensure the value is in a byte register.
locations->AddTemp(Location::RegisterLocation(ECX));
} else if (type == Primitive::kPrimLong && is_volatile) {
@@ -1498,6 +1503,11 @@
__ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value_lo);
__ movl(Address(base, offset, ScaleFactor::TIMES_1, 4), value_hi);
}
+ } else if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
+ Register temp = locations->GetTemp(0).AsRegister<Register>();
+ __ movl(temp, value_loc.AsRegister<Register>());
+ __ PoisonHeapReference(temp);
+ __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), temp);
} else {
__ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value_loc.AsRegister<Register>());
}
@@ -1604,7 +1614,8 @@
__ LockCmpxchg8b(Address(base, offset, TIMES_1, 0));
} else {
// Integer or object.
- DCHECK_EQ(locations->InAt(3).AsRegister<Register>(), EAX);
+ Register expected = locations->InAt(3).AsRegister<Register>();
+ DCHECK_EQ(expected, EAX);
Register value = locations->InAt(4).AsRegister<Register>();
if (type == Primitive::kPrimNot) {
// Mark card for object assuming new value is stored.
@@ -1614,6 +1625,11 @@
base,
value,
value_can_be_null);
+
+ if (kPoisonHeapReferences) {
+ __ PoisonHeapReference(expected);
+ __ PoisonHeapReference(value);
+ }
}
__ LockCmpxchgl(Address(base, offset, TIMES_1, 0), value);
@@ -1625,6 +1641,13 @@
// Convert ZF into the boolean result.
__ setb(kZero, out.AsRegister<Register>());
__ movzxb(out.AsRegister<Register>(), out.AsRegister<ByteRegister>());
+
+ if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
+ Register value = locations->InAt(4).AsRegister<Register>();
+ __ UnpoisonHeapReference(value);
+ // Do not unpoison the reference contained in register `expected`,
+ // as it is the same as register `out`.
+ }
}
void IntrinsicCodeGeneratorX86::VisitUnsafeCASInt(HInvoke* invoke) {
@@ -1734,5 +1757,9 @@
UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
+#undef UNIMPLEMENTED_INTRINSIC
+
+#undef __
+
} // namespace x86
} // namespace art
diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc
index 888c7b8..ea342e9 100644
--- a/compiler/optimizing/intrinsics_x86_64.cc
+++ b/compiler/optimizing/intrinsics_x86_64.cc
@@ -1251,6 +1251,9 @@
case Primitive::kPrimInt:
case Primitive::kPrimNot:
__ movl(trg, Address(base, offset, ScaleFactor::TIMES_1, 0));
+ if (type == Primitive::kPrimNot) {
+ __ MaybeUnpoisonHeapReference(trg);
+ }
break;
case Primitive::kPrimLong:
@@ -1325,7 +1328,7 @@
locations->SetInAt(3, Location::RequiresRegister());
if (type == Primitive::kPrimNot) {
// Need temp registers for card-marking.
- locations->AddTemp(Location::RequiresRegister());
+ locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
locations->AddTemp(Location::RequiresRegister());
}
}
@@ -1369,6 +1372,11 @@
if (type == Primitive::kPrimLong) {
__ movq(Address(base, offset, ScaleFactor::TIMES_1, 0), value);
+ } else if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
+ CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
+ __ movl(temp, value);
+ __ PoisonHeapReference(temp);
+ __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), temp);
} else {
__ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value);
}
@@ -1471,6 +1479,11 @@
base,
value,
value_can_be_null);
+
+ if (kPoisonHeapReferences) {
+ __ PoisonHeapReference(expected);
+ __ PoisonHeapReference(value);
+ }
}
__ LockCmpxchgl(Address(base, offset, TIMES_1, 0), value);
@@ -1482,6 +1495,11 @@
// Convert ZF into the boolean result.
__ setcc(kZero, out);
__ movzxb(out, out);
+
+ if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
+ __ UnpoisonHeapReference(value);
+ __ UnpoisonHeapReference(expected);
+ }
}
void IntrinsicCodeGeneratorX86_64::VisitUnsafeCASInt(HInvoke* invoke) {
@@ -1598,5 +1616,9 @@
UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
+#undef UNIMPLEMENTED_INTRINSIC
+
+#undef __
+
} // namespace x86_64
} // namespace art
diff --git a/compiler/optimizing/locations.cc b/compiler/optimizing/locations.cc
index 42aba04..d14dfc1 100644
--- a/compiler/optimizing/locations.cc
+++ b/compiler/optimizing/locations.cc
@@ -51,16 +51,17 @@
}
Location Location::RegisterOrInt32LongConstant(HInstruction* instruction) {
- if (!instruction->IsConstant() || !instruction->AsConstant()->IsLongConstant()) {
+ if (instruction->IsIntConstant() || instruction->IsNullConstant()) {
+ return Location::ConstantLocation(instruction->AsConstant());
+ } else if (instruction->IsLongConstant()) {
+ // Does the long constant fit in a 32 bit int?
+ int64_t value = instruction->AsLongConstant()->GetValue();
+ return IsInt<32>(value)
+ ? Location::ConstantLocation(instruction->AsConstant())
+ : Location::RequiresRegister();
+ } else {
return Location::RequiresRegister();
}
-
- // Does the long constant fit in a 32 bit int?
- int64_t value = instruction->AsConstant()->AsLongConstant()->GetValue();
-
- return IsInt<32>(value)
- ? Location::ConstantLocation(instruction->AsConstant())
- : Location::RequiresRegister();
}
Location Location::ByteRegisterOrConstant(int reg, HInstruction* instruction) {
diff --git a/compiler/optimizing/nodes.cc b/compiler/optimizing/nodes.cc
index 881f9ec..b82e37c 100644
--- a/compiler/optimizing/nodes.cc
+++ b/compiler/optimizing/nodes.cc
@@ -1050,6 +1050,7 @@
successors_.Reset();
AddSuccessor(new_block);
+ GetGraph()->AddBlock(new_block);
return new_block;
}
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 95ea966..04c3963 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -729,9 +729,10 @@
bool IsExceptionalSuccessor(size_t idx) const;
// Split the block into two blocks just before `cursor`. Returns the newly
- // created, latter block. Note that this method will create a Goto at the end
- // of the former block and will create an edge between them. It will not,
- // however, update the graph, reverse post order or loop information.
+ // created, latter block. Note that this method will add the block to the
+ // graph, create a Goto at the end of the former block and will create an edge
+ // between the blocks. It will not, however, update the reverse post order or
+ // loop information.
HBasicBlock* SplitBefore(HInstruction* cursor);
// Split the block into two blocks just after `cursor`. Returns the newly
@@ -1946,8 +1947,13 @@
// higher indices in no particular order.
class HTryBoundary : public HTemplateInstruction<0> {
public:
- HTryBoundary(bool is_entry, bool is_exit)
- : HTemplateInstruction(SideEffects::None()), is_entry_(is_entry), is_exit_(is_exit) {}
+ enum BoundaryKind {
+ kEntry,
+ kExit,
+ };
+
+ explicit HTryBoundary(BoundaryKind kind)
+ : HTemplateInstruction(SideEffects::None()), kind_(kind) {}
bool IsControlFlow() const OVERRIDE { return true; }
@@ -1977,21 +1983,12 @@
}
}
- bool IsTryEntry() const { return is_entry_; }
- bool IsTryExit() const { return is_exit_; }
+ bool IsEntry() const { return kind_ == BoundaryKind::kEntry; }
DECLARE_INSTRUCTION(TryBoundary);
private:
- // Only for debugging purposes.
- bool is_entry_;
- bool is_exit_;
-
- // Only set by HGraphBuilder.
- void SetIsTryEntry() { is_entry_ = true; }
- void SetIsTryExit() { is_exit_ = true; }
-
- friend HGraphBuilder;
+ const BoundaryKind kind_;
DISALLOW_COPY_AND_ASSIGN(HTryBoundary);
};
diff --git a/compiler/utils/arm/assembler_arm.cc b/compiler/utils/arm/assembler_arm.cc
index 0086fe8..09d2270 100644
--- a/compiler/utils/arm/assembler_arm.cc
+++ b/compiler/utils/arm/assembler_arm.cc
@@ -529,13 +529,13 @@
}
void ArmAssembler::LoadRef(ManagedRegister mdest, ManagedRegister base, MemberOffset offs,
- bool poison_reference) {
+ bool unpoison_reference) {
ArmManagedRegister dst = mdest.AsArm();
CHECK(dst.IsCoreRegister() && dst.IsCoreRegister()) << dst;
LoadFromOffset(kLoadWord, dst.AsCoreRegister(),
base.AsArm().AsCoreRegister(), offs.Int32Value());
- if (kPoisonHeapReferences && poison_reference) {
- rsb(dst.AsCoreRegister(), dst.AsCoreRegister(), ShifterOperand(0));
+ if (unpoison_reference) {
+ MaybeUnpoisonHeapReference(dst.AsCoreRegister());
}
}
diff --git a/compiler/utils/arm/assembler_arm.h b/compiler/utils/arm/assembler_arm.h
index f8ca48e..5d85d11 100644
--- a/compiler/utils/arm/assembler_arm.h
+++ b/compiler/utils/arm/assembler_arm.h
@@ -774,7 +774,7 @@
void LoadRef(ManagedRegister dest, FrameOffset src) OVERRIDE;
void LoadRef(ManagedRegister dest, ManagedRegister base, MemberOffset offs,
- bool poison_reference) OVERRIDE;
+ bool unpoison_reference) OVERRIDE;
void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) OVERRIDE;
@@ -857,6 +857,27 @@
return r >= R8;
}
+ //
+ // Heap poisoning.
+ //
+
+ // Poison a heap reference contained in `reg`.
+ void PoisonHeapReference(Register reg) {
+ // reg = -reg.
+ rsb(reg, reg, ShifterOperand(0));
+ }
+ // Unpoison a heap reference contained in `reg`.
+ void UnpoisonHeapReference(Register reg) {
+ // reg = -reg.
+ rsb(reg, reg, ShifterOperand(0));
+ }
+ // Unpoison a heap reference contained in `reg` if heap poisoning is enabled.
+ void MaybeUnpoisonHeapReference(Register reg) {
+ if (kPoisonHeapReferences) {
+ UnpoisonHeapReference(reg);
+ }
+ }
+
protected:
// Returns whether or not the given register is used for passing parameters.
static int RegisterCompare(const Register* reg1, const Register* reg2) {
diff --git a/compiler/utils/arm/assembler_thumb2.cc b/compiler/utils/arm/assembler_thumb2.cc
index f9e1ac6..2dde014 100644
--- a/compiler/utils/arm/assembler_thumb2.cc
+++ b/compiler/utils/arm/assembler_thumb2.cc
@@ -3341,7 +3341,7 @@
void Thumb2Assembler::CompareAndBranchIfZero(Register r, Label* label) {
- if (CanRelocateBranches() && IsLowRegister(r)) {
+ if (CanRelocateBranches() && IsLowRegister(r) && !label->IsBound()) {
cbz(r, label);
} else {
cmp(r, ShifterOperand(0));
@@ -3351,7 +3351,7 @@
void Thumb2Assembler::CompareAndBranchIfNonZero(Register r, Label* label) {
- if (CanRelocateBranches() && IsLowRegister(r)) {
+ if (CanRelocateBranches() && IsLowRegister(r) && !label->IsBound()) {
cbnz(r, label);
} else {
cmp(r, ShifterOperand(0));
diff --git a/compiler/utils/arm64/assembler_arm64.cc b/compiler/utils/arm64/assembler_arm64.cc
index 077579c..0e17512 100644
--- a/compiler/utils/arm64/assembler_arm64.cc
+++ b/compiler/utils/arm64/assembler_arm64.cc
@@ -298,15 +298,15 @@
}
void Arm64Assembler::LoadRef(ManagedRegister m_dst, ManagedRegister m_base, MemberOffset offs,
- bool poison_reference) {
+ bool unpoison_reference) {
Arm64ManagedRegister dst = m_dst.AsArm64();
Arm64ManagedRegister base = m_base.AsArm64();
CHECK(dst.IsXRegister() && base.IsXRegister());
LoadWFromOffset(kLoadWord, dst.AsOverlappingWRegister(), base.AsXRegister(),
offs.Int32Value());
- if (kPoisonHeapReferences && poison_reference) {
+ if (unpoison_reference) {
WRegister ref_reg = dst.AsOverlappingWRegister();
- ___ Neg(reg_w(ref_reg), vixl::Operand(reg_w(ref_reg)));
+ MaybeUnpoisonHeapReference(reg_w(ref_reg));
}
}
@@ -784,5 +784,25 @@
cfi_.DefCFAOffset(frame_size);
}
+void Arm64Assembler::PoisonHeapReference(vixl::Register reg) {
+ DCHECK(reg.IsW());
+ // reg = -reg.
+ ___ Neg(reg, vixl::Operand(reg));
+}
+
+void Arm64Assembler::UnpoisonHeapReference(vixl::Register reg) {
+ DCHECK(reg.IsW());
+ // reg = -reg.
+ ___ Neg(reg, vixl::Operand(reg));
+}
+
+void Arm64Assembler::MaybeUnpoisonHeapReference(vixl::Register reg) {
+ if (kPoisonHeapReferences) {
+ UnpoisonHeapReference(reg);
+ }
+}
+
+#undef ___
+
} // namespace arm64
} // namespace art
diff --git a/compiler/utils/arm64/assembler_arm64.h b/compiler/utils/arm64/assembler_arm64.h
index db95537..05882a3 100644
--- a/compiler/utils/arm64/assembler_arm64.h
+++ b/compiler/utils/arm64/assembler_arm64.h
@@ -10,7 +10,7 @@
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
+ * See the License for the specific language governing permissions and
* limitations under the License.
*/
@@ -116,7 +116,7 @@
void LoadFromThread64(ManagedRegister dest, ThreadOffset<8> src, size_t size) OVERRIDE;
void LoadRef(ManagedRegister dest, FrameOffset src) OVERRIDE;
void LoadRef(ManagedRegister dest, ManagedRegister base, MemberOffset offs,
- bool poison_reference) OVERRIDE;
+ bool unpoison_reference) OVERRIDE;
void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) OVERRIDE;
void LoadRawPtrFromThread64(ManagedRegister dest, ThreadOffset<8> offs) OVERRIDE;
@@ -182,6 +182,17 @@
// and branch to a ExceptionSlowPath if it is.
void ExceptionPoll(ManagedRegister scratch, size_t stack_adjust) OVERRIDE;
+ //
+ // Heap poisoning.
+ //
+
+ // Poison a heap reference contained in `reg`.
+ void PoisonHeapReference(vixl::Register reg);
+ // Unpoison a heap reference contained in `reg`.
+ void UnpoisonHeapReference(vixl::Register reg);
+ // Unpoison a heap reference contained in `reg` if heap poisoning is enabled.
+ void MaybeUnpoisonHeapReference(vixl::Register reg);
+
private:
static vixl::Register reg_x(int code) {
CHECK(code < kNumberOfXRegisters) << code;
diff --git a/compiler/utils/assembler.h b/compiler/utils/assembler.h
index ee2d594..3097cd5 100644
--- a/compiler/utils/assembler.h
+++ b/compiler/utils/assembler.h
@@ -441,9 +441,9 @@
virtual void LoadFromThread64(ManagedRegister dest, ThreadOffset<8> src, size_t size);
virtual void LoadRef(ManagedRegister dest, FrameOffset src) = 0;
- // If poison_reference is true and kPoisonReference is true, then we negate the read reference.
+ // If unpoison_reference is true and kPoisonHeapReferences is true, then we negate (unpoison) the read reference.
virtual void LoadRef(ManagedRegister dest, ManagedRegister base, MemberOffset offs,
- bool poison_reference) = 0;
+ bool unpoison_reference) = 0;
virtual void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) = 0;
diff --git a/compiler/utils/mips/assembler_mips.cc b/compiler/utils/mips/assembler_mips.cc
index e55b461..c09dfcc 100644
--- a/compiler/utils/mips/assembler_mips.cc
+++ b/compiler/utils/mips/assembler_mips.cc
@@ -697,12 +697,12 @@
}
void MipsAssembler::LoadRef(ManagedRegister mdest, ManagedRegister base, MemberOffset offs,
- bool poison_reference) {
+ bool unpoison_reference) {
MipsManagedRegister dest = mdest.AsMips();
CHECK(dest.IsCoreRegister() && dest.IsCoreRegister());
LoadFromOffset(kLoadWord, dest.AsCoreRegister(),
base.AsMips().AsCoreRegister(), offs.Int32Value());
- if (kPoisonHeapReferences && poison_reference) {
+ if (kPoisonHeapReferences && unpoison_reference) {
Subu(dest.AsCoreRegister(), ZERO, dest.AsCoreRegister());
}
}
diff --git a/compiler/utils/mips/assembler_mips.h b/compiler/utils/mips/assembler_mips.h
index 7b0fc39..0d1b82c 100644
--- a/compiler/utils/mips/assembler_mips.h
+++ b/compiler/utils/mips/assembler_mips.h
@@ -192,7 +192,7 @@
void LoadRef(ManagedRegister dest, FrameOffset src) OVERRIDE;
void LoadRef(ManagedRegister mdest, ManagedRegister base, MemberOffset offs,
- bool poison_reference) OVERRIDE;
+ bool unpoison_reference) OVERRIDE;
void LoadRawPtr(ManagedRegister mdest, ManagedRegister base, Offset offs) OVERRIDE;
diff --git a/compiler/utils/mips64/assembler_mips64.cc b/compiler/utils/mips64/assembler_mips64.cc
index 3333cd2..24ea9e2 100644
--- a/compiler/utils/mips64/assembler_mips64.cc
+++ b/compiler/utils/mips64/assembler_mips64.cc
@@ -1242,12 +1242,12 @@
}
void Mips64Assembler::LoadRef(ManagedRegister mdest, ManagedRegister base, MemberOffset offs,
- bool poison_reference) {
+ bool unpoison_reference) {
Mips64ManagedRegister dest = mdest.AsMips64();
CHECK(dest.IsGpuRegister() && base.AsMips64().IsGpuRegister());
LoadFromOffset(kLoadUnsignedWord, dest.AsGpuRegister(),
base.AsMips64().AsGpuRegister(), offs.Int32Value());
- if (kPoisonHeapReferences && poison_reference) {
+ if (kPoisonHeapReferences && unpoison_reference) {
// TODO: review
// Negate the 32-bit ref
Dsubu(dest.AsGpuRegister(), ZERO, dest.AsGpuRegister());
diff --git a/compiler/utils/mips64/assembler_mips64.h b/compiler/utils/mips64/assembler_mips64.h
index 88cc4bc..47b146a 100644
--- a/compiler/utils/mips64/assembler_mips64.h
+++ b/compiler/utils/mips64/assembler_mips64.h
@@ -265,7 +265,7 @@
void LoadRef(ManagedRegister dest, FrameOffset src) OVERRIDE;
void LoadRef(ManagedRegister mdest, ManagedRegister base, MemberOffset offs,
- bool poison_reference) OVERRIDE;
+ bool unpoison_reference) OVERRIDE;
void LoadRawPtr(ManagedRegister mdest, ManagedRegister base, Offset offs) OVERRIDE;
diff --git a/compiler/utils/x86/assembler_x86.cc b/compiler/utils/x86/assembler_x86.cc
index 390d46e..fa85ada 100644
--- a/compiler/utils/x86/assembler_x86.cc
+++ b/compiler/utils/x86/assembler_x86.cc
@@ -1910,12 +1910,12 @@
}
void X86Assembler::LoadRef(ManagedRegister mdest, ManagedRegister base, MemberOffset offs,
- bool poison_reference) {
+ bool unpoison_reference) {
X86ManagedRegister dest = mdest.AsX86();
CHECK(dest.IsCpuRegister() && dest.IsCpuRegister());
movl(dest.AsCpuRegister(), Address(base.AsX86().AsCpuRegister(), offs));
- if (kPoisonHeapReferences && poison_reference) {
- negl(dest.AsCpuRegister());
+ if (unpoison_reference) {
+ MaybeUnpoisonHeapReference(dest.AsCpuRegister());
}
}
diff --git a/compiler/utils/x86/assembler_x86.h b/compiler/utils/x86/assembler_x86.h
index 1c1c023..d1b4e1d 100644
--- a/compiler/utils/x86/assembler_x86.h
+++ b/compiler/utils/x86/assembler_x86.h
@@ -541,7 +541,7 @@
void LoadRef(ManagedRegister dest, FrameOffset src) OVERRIDE;
void LoadRef(ManagedRegister dest, ManagedRegister base, MemberOffset offs,
- bool poison_reference) OVERRIDE;
+ bool unpoison_reference) OVERRIDE;
void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) OVERRIDE;
@@ -616,6 +616,21 @@
// and branch to a ExceptionSlowPath if it is.
void ExceptionPoll(ManagedRegister scratch, size_t stack_adjust) OVERRIDE;
+ //
+ // Heap poisoning.
+ //
+
+ // Poison a heap reference contained in `reg`.
+ void PoisonHeapReference(Register reg) { negl(reg); }
+ // Unpoison a heap reference contained in `reg`.
+ void UnpoisonHeapReference(Register reg) { negl(reg); }
+ // Unpoison a heap reference contained in `reg` if heap poisoning is enabled.
+ void MaybeUnpoisonHeapReference(Register reg) {
+ if (kPoisonHeapReferences) {
+ UnpoisonHeapReference(reg);
+ }
+ }
+
private:
inline void EmitUint8(uint8_t value);
inline void EmitInt32(int32_t value);
diff --git a/compiler/utils/x86_64/assembler_x86_64.cc b/compiler/utils/x86_64/assembler_x86_64.cc
index ac95c71..f35f51c 100644
--- a/compiler/utils/x86_64/assembler_x86_64.cc
+++ b/compiler/utils/x86_64/assembler_x86_64.cc
@@ -2597,12 +2597,12 @@
}
void X86_64Assembler::LoadRef(ManagedRegister mdest, ManagedRegister base, MemberOffset offs,
- bool poison_reference) {
+ bool unpoison_reference) {
X86_64ManagedRegister dest = mdest.AsX86_64();
CHECK(dest.IsCpuRegister() && dest.IsCpuRegister());
movl(dest.AsCpuRegister(), Address(base.AsX86_64().AsCpuRegister(), offs));
- if (kPoisonHeapReferences && poison_reference) {
- negl(dest.AsCpuRegister());
+ if (unpoison_reference) {
+ MaybeUnpoisonHeapReference(dest.AsCpuRegister());
}
}
diff --git a/compiler/utils/x86_64/assembler_x86_64.h b/compiler/utils/x86_64/assembler_x86_64.h
index 6b2b65d..61ffeab 100644
--- a/compiler/utils/x86_64/assembler_x86_64.h
+++ b/compiler/utils/x86_64/assembler_x86_64.h
@@ -669,7 +669,7 @@
void LoadRef(ManagedRegister dest, FrameOffset src) OVERRIDE;
void LoadRef(ManagedRegister dest, ManagedRegister base, MemberOffset offs,
- bool poison_reference) OVERRIDE;
+ bool unpoison_reference) OVERRIDE;
void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) OVERRIDE;
@@ -767,6 +767,21 @@
// Is the constant area empty? Return true if there are no literals in the constant area.
bool IsConstantAreaEmpty() const { return constant_area_.GetSize() == 0; }
+ //
+ // Heap poisoning.
+ //
+
+ // Poison a heap reference contained in `reg`.
+ void PoisonHeapReference(CpuRegister reg) { negl(reg); }
+ // Unpoison a heap reference contained in `reg`.
+ void UnpoisonHeapReference(CpuRegister reg) { negl(reg); }
+ // Unpoison a heap reference contained in `reg` if heap poisoning is enabled.
+ void MaybeUnpoisonHeapReference(CpuRegister reg) {
+ if (kPoisonHeapReferences) {
+ UnpoisonHeapReference(reg);
+ }
+ }
+
private:
void EmitUint8(uint8_t value);
void EmitInt32(int32_t value);
diff --git a/runtime/common_runtime_test.h b/runtime/common_runtime_test.h
index 0987c00..2332f97 100644
--- a/runtime/common_runtime_test.h
+++ b/runtime/common_runtime_test.h
@@ -174,13 +174,6 @@
DISALLOW_COPY_AND_ASSIGN(CheckJniAbortCatcher);
};
-// TODO: When heap reference poisoning works with the compiler, get rid of this.
-#define TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING() \
- if (kPoisonHeapReferences) { \
- printf("WARNING: TEST DISABLED FOR HEAP REFERENCE POISONING\n"); \
- return; \
- }
-
// TODO: When read barrier works with the compiler, get rid of this.
#define TEST_DISABLED_FOR_READ_BARRIER() \
if (kUseReadBarrier) { \
diff --git a/runtime/dex_instruction.cc b/runtime/dex_instruction.cc
index 537fa15..fc4df14 100644
--- a/runtime/dex_instruction.cc
+++ b/runtime/dex_instruction.cc
@@ -43,6 +43,14 @@
#undef INSTRUCTION_FORMAT
};
+Instruction::IndexType const Instruction::kInstructionIndexTypes[] = {
+#define INSTRUCTION_INDEX_TYPE(o, c, p, f, r, index, a, v) index,
+#include "dex_instruction_list.h"
+ DEX_INSTRUCTION_LIST(INSTRUCTION_INDEX_TYPE)
+#undef DEX_INSTRUCTION_LIST
+#undef INSTRUCTION_INDEX_TYPE
+};
+
int const Instruction::kInstructionFlags[] = {
#define INSTRUCTION_FLAGS(o, c, p, f, r, i, flags, v) flags,
#include "dex_instruction_list.h"
diff --git a/runtime/dex_instruction.h b/runtime/dex_instruction.h
index b043aba..0ddbf7c 100644
--- a/runtime/dex_instruction.h
+++ b/runtime/dex_instruction.h
@@ -116,6 +116,17 @@
k51l, // op vAA, #+BBBBBBBBBBBBBBBB
};
+ enum IndexType {
+ kIndexUnknown = 0,
+ kIndexNone, // has no index
+ kIndexTypeRef, // type reference index
+ kIndexStringRef, // string reference index
+ kIndexMethodRef, // method reference index
+ kIndexFieldRef, // field reference index
+ kIndexFieldOffset, // field offset (for static linked fields)
+ kIndexVtableOffset // vtable offset (for static linked methods)
+ };
+
enum Flags {
kBranch = 0x0000001, // conditional or unconditional branch
kContinue = 0x0000002, // flow can continue to next statement
@@ -446,6 +457,11 @@
return kInstructionFormats[opcode];
}
+ // Returns the index type of the given opcode.
+ static IndexType IndexTypeOf(Code opcode) {
+ return kInstructionIndexTypes[opcode];
+ }
+
// Returns the flags for the given opcode.
static int FlagsOf(Code opcode) {
return kInstructionFlags[opcode];
@@ -583,6 +599,7 @@
static const char* const kInstructionNames[];
static Format const kInstructionFormats[];
+ static IndexType const kInstructionIndexTypes[];
static int const kInstructionFlags[];
static int const kInstructionVerifyFlags[];
static int const kInstructionSizeInCodeUnits[];
diff --git a/runtime/dex_instruction_list.h b/runtime/dex_instruction_list.h
index 803d58d..a176772 100644
--- a/runtime/dex_instruction_list.h
+++ b/runtime/dex_instruction_list.h
@@ -18,263 +18,263 @@
#define ART_RUNTIME_DEX_INSTRUCTION_LIST_H_
#define DEX_INSTRUCTION_LIST(V) \
- V(0x00, NOP, "nop", k10x, false, kNone, kContinue, kVerifyNone) \
- V(0x01, MOVE, "move", k12x, true, kNone, kContinue, kVerifyRegA | kVerifyRegB) \
- V(0x02, MOVE_FROM16, "move/from16", k22x, true, kNone, kContinue, kVerifyRegA | kVerifyRegB) \
- V(0x03, MOVE_16, "move/16", k32x, true, kNone, kContinue, kVerifyRegA | kVerifyRegB) \
- V(0x04, MOVE_WIDE, "move-wide", k12x, true, kNone, kContinue, kVerifyRegAWide | kVerifyRegBWide) \
- V(0x05, MOVE_WIDE_FROM16, "move-wide/from16", k22x, true, kNone, kContinue, kVerifyRegAWide | kVerifyRegBWide) \
- V(0x06, MOVE_WIDE_16, "move-wide/16", k32x, true, kNone, kContinue, kVerifyRegAWide | kVerifyRegBWide) \
- V(0x07, MOVE_OBJECT, "move-object", k12x, true, kNone, kContinue, kVerifyRegA | kVerifyRegB) \
- V(0x08, MOVE_OBJECT_FROM16, "move-object/from16", k22x, true, kNone, kContinue, kVerifyRegA | kVerifyRegB) \
- V(0x09, MOVE_OBJECT_16, "move-object/16", k32x, true, kNone, kContinue, kVerifyRegA | kVerifyRegB) \
- V(0x0A, MOVE_RESULT, "move-result", k11x, true, kNone, kContinue, kVerifyRegA) \
- V(0x0B, MOVE_RESULT_WIDE, "move-result-wide", k11x, true, kNone, kContinue, kVerifyRegAWide) \
- V(0x0C, MOVE_RESULT_OBJECT, "move-result-object", k11x, true, kNone, kContinue, kVerifyRegA) \
- V(0x0D, MOVE_EXCEPTION, "move-exception", k11x, true, kNone, kContinue, kVerifyRegA) \
- V(0x0E, RETURN_VOID, "return-void", k10x, false, kNone, kReturn, kVerifyNone) \
- V(0x0F, RETURN, "return", k11x, false, kNone, kReturn, kVerifyRegA) \
- V(0x10, RETURN_WIDE, "return-wide", k11x, false, kNone, kReturn, kVerifyRegAWide) \
- V(0x11, RETURN_OBJECT, "return-object", k11x, false, kNone, kReturn, kVerifyRegA) \
- V(0x12, CONST_4, "const/4", k11n, true, kNone, kContinue | kRegBFieldOrConstant, kVerifyRegA) \
- V(0x13, CONST_16, "const/16", k21s, true, kNone, kContinue | kRegBFieldOrConstant, kVerifyRegA) \
- V(0x14, CONST, "const", k31i, true, kNone, kContinue | kRegBFieldOrConstant, kVerifyRegA) \
- V(0x15, CONST_HIGH16, "const/high16", k21h, true, kNone, kContinue | kRegBFieldOrConstant, kVerifyRegA) \
- V(0x16, CONST_WIDE_16, "const-wide/16", k21s, true, kNone, kContinue | kRegBFieldOrConstant, kVerifyRegAWide) \
- V(0x17, CONST_WIDE_32, "const-wide/32", k31i, true, kNone, kContinue | kRegBFieldOrConstant, kVerifyRegAWide) \
- V(0x18, CONST_WIDE, "const-wide", k51l, true, kNone, kContinue | kRegBFieldOrConstant, kVerifyRegAWide) \
- V(0x19, CONST_WIDE_HIGH16, "const-wide/high16", k21h, true, kNone, kContinue | kRegBFieldOrConstant, kVerifyRegAWide) \
- V(0x1A, CONST_STRING, "const-string", k21c, true, kStringRef, kContinue | kThrow, kVerifyRegA | kVerifyRegBString) \
- V(0x1B, CONST_STRING_JUMBO, "const-string/jumbo", k31c, true, kStringRef, kContinue | kThrow, kVerifyRegA | kVerifyRegBString) \
- V(0x1C, CONST_CLASS, "const-class", k21c, true, kTypeRef, kContinue | kThrow, kVerifyRegA | kVerifyRegBType) \
- V(0x1D, MONITOR_ENTER, "monitor-enter", k11x, false, kNone, kContinue | kThrow | kClobber, kVerifyRegA) \
- V(0x1E, MONITOR_EXIT, "monitor-exit", k11x, false, kNone, kContinue | kThrow | kClobber, kVerifyRegA) \
- V(0x1F, CHECK_CAST, "check-cast", k21c, true, kTypeRef, kContinue | kThrow, kVerifyRegA | kVerifyRegBType) \
- V(0x20, INSTANCE_OF, "instance-of", k22c, true, kTypeRef, kContinue | kThrow, kVerifyRegA | kVerifyRegB | kVerifyRegCType) \
- V(0x21, ARRAY_LENGTH, "array-length", k12x, true, kNone, kContinue | kThrow, kVerifyRegA | kVerifyRegB) \
- V(0x22, NEW_INSTANCE, "new-instance", k21c, true, kTypeRef, kContinue | kThrow | kClobber, kVerifyRegA | kVerifyRegBNewInstance) \
- V(0x23, NEW_ARRAY, "new-array", k22c, true, kTypeRef, kContinue | kThrow | kClobber, kVerifyRegA | kVerifyRegB | kVerifyRegCNewArray) \
- V(0x24, FILLED_NEW_ARRAY, "filled-new-array", k35c, false, kTypeRef, kContinue | kThrow | kClobber, kVerifyRegBType | kVerifyVarArg) \
- V(0x25, FILLED_NEW_ARRAY_RANGE, "filled-new-array/range", k3rc, false, kTypeRef, kContinue | kThrow | kClobber, kVerifyRegBType | kVerifyVarArgRange) \
- V(0x26, FILL_ARRAY_DATA, "fill-array-data", k31t, false, kNone, kContinue | kThrow | kClobber, kVerifyRegA | kVerifyArrayData) \
- V(0x27, THROW, "throw", k11x, false, kNone, kThrow, kVerifyRegA) \
- V(0x28, GOTO, "goto", k10t, false, kNone, kBranch | kUnconditional, kVerifyBranchTarget) \
- V(0x29, GOTO_16, "goto/16", k20t, false, kNone, kBranch | kUnconditional, kVerifyBranchTarget) \
- V(0x2A, GOTO_32, "goto/32", k30t, false, kNone, kBranch | kUnconditional, kVerifyBranchTarget) \
- V(0x2B, PACKED_SWITCH, "packed-switch", k31t, false, kNone, kContinue | kSwitch, kVerifyRegA | kVerifySwitchTargets) \
- V(0x2C, SPARSE_SWITCH, "sparse-switch", k31t, false, kNone, kContinue | kSwitch, kVerifyRegA | kVerifySwitchTargets) \
- V(0x2D, CMPL_FLOAT, "cmpl-float", k23x, true, kNone, kContinue, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0x2E, CMPG_FLOAT, "cmpg-float", k23x, true, kNone, kContinue, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0x2F, CMPL_DOUBLE, "cmpl-double", k23x, true, kNone, kContinue, kVerifyRegA | kVerifyRegBWide | kVerifyRegCWide) \
- V(0x30, CMPG_DOUBLE, "cmpg-double", k23x, true, kNone, kContinue, kVerifyRegA | kVerifyRegBWide | kVerifyRegCWide) \
- V(0x31, CMP_LONG, "cmp-long", k23x, true, kNone, kContinue, kVerifyRegA | kVerifyRegBWide | kVerifyRegCWide) \
- V(0x32, IF_EQ, "if-eq", k22t, false, kNone, kContinue | kBranch, kVerifyRegA | kVerifyRegB | kVerifyBranchTarget) \
- V(0x33, IF_NE, "if-ne", k22t, false, kNone, kContinue | kBranch, kVerifyRegA | kVerifyRegB | kVerifyBranchTarget) \
- V(0x34, IF_LT, "if-lt", k22t, false, kNone, kContinue | kBranch, kVerifyRegA | kVerifyRegB | kVerifyBranchTarget) \
- V(0x35, IF_GE, "if-ge", k22t, false, kNone, kContinue | kBranch, kVerifyRegA | kVerifyRegB | kVerifyBranchTarget) \
- V(0x36, IF_GT, "if-gt", k22t, false, kNone, kContinue | kBranch, kVerifyRegA | kVerifyRegB | kVerifyBranchTarget) \
- V(0x37, IF_LE, "if-le", k22t, false, kNone, kContinue | kBranch, kVerifyRegA | kVerifyRegB | kVerifyBranchTarget) \
- V(0x38, IF_EQZ, "if-eqz", k21t, false, kNone, kContinue | kBranch, kVerifyRegA | kVerifyBranchTarget) \
- V(0x39, IF_NEZ, "if-nez", k21t, false, kNone, kContinue | kBranch, kVerifyRegA | kVerifyBranchTarget) \
- V(0x3A, IF_LTZ, "if-ltz", k21t, false, kNone, kContinue | kBranch, kVerifyRegA | kVerifyBranchTarget) \
- V(0x3B, IF_GEZ, "if-gez", k21t, false, kNone, kContinue | kBranch, kVerifyRegA | kVerifyBranchTarget) \
- V(0x3C, IF_GTZ, "if-gtz", k21t, false, kNone, kContinue | kBranch, kVerifyRegA | kVerifyBranchTarget) \
- V(0x3D, IF_LEZ, "if-lez", k21t, false, kNone, kContinue | kBranch, kVerifyRegA | kVerifyBranchTarget) \
- V(0x3E, UNUSED_3E, "unused-3e", k10x, false, kUnknown, 0, kVerifyError) \
- V(0x3F, UNUSED_3F, "unused-3f", k10x, false, kUnknown, 0, kVerifyError) \
- V(0x40, UNUSED_40, "unused-40", k10x, false, kUnknown, 0, kVerifyError) \
- V(0x41, UNUSED_41, "unused-41", k10x, false, kUnknown, 0, kVerifyError) \
- V(0x42, UNUSED_42, "unused-42", k10x, false, kUnknown, 0, kVerifyError) \
- V(0x43, UNUSED_43, "unused-43", k10x, false, kUnknown, 0, kVerifyError) \
- V(0x44, AGET, "aget", k23x, true, kNone, kContinue | kThrow | kLoad, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0x45, AGET_WIDE, "aget-wide", k23x, true, kNone, kContinue | kThrow | kLoad, kVerifyRegAWide | kVerifyRegB | kVerifyRegC) \
- V(0x46, AGET_OBJECT, "aget-object", k23x, true, kNone, kContinue | kThrow | kLoad, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0x47, AGET_BOOLEAN, "aget-boolean", k23x, true, kNone, kContinue | kThrow | kLoad, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0x48, AGET_BYTE, "aget-byte", k23x, true, kNone, kContinue | kThrow | kLoad, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0x49, AGET_CHAR, "aget-char", k23x, true, kNone, kContinue | kThrow | kLoad, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0x4A, AGET_SHORT, "aget-short", k23x, true, kNone, kContinue | kThrow | kLoad, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0x4B, APUT, "aput", k23x, false, kNone, kContinue | kThrow | kStore, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0x4C, APUT_WIDE, "aput-wide", k23x, false, kNone, kContinue | kThrow | kStore, kVerifyRegAWide | kVerifyRegB | kVerifyRegC) \
- V(0x4D, APUT_OBJECT, "aput-object", k23x, false, kNone, kContinue | kThrow | kStore, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0x4E, APUT_BOOLEAN, "aput-boolean", k23x, false, kNone, kContinue | kThrow | kStore, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0x4F, APUT_BYTE, "aput-byte", k23x, false, kNone, kContinue | kThrow | kStore, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0x50, APUT_CHAR, "aput-char", k23x, false, kNone, kContinue | kThrow | kStore, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0x51, APUT_SHORT, "aput-short", k23x, false, kNone, kContinue | kThrow | kStore, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0x52, IGET, "iget", k22c, true, kFieldRef, kContinue | kThrow | kLoad | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRegCField) \
- V(0x53, IGET_WIDE, "iget-wide", k22c, true, kFieldRef, kContinue | kThrow | kLoad | kRegCFieldOrConstant, kVerifyRegAWide | kVerifyRegB | kVerifyRegCField) \
- V(0x54, IGET_OBJECT, "iget-object", k22c, true, kFieldRef, kContinue | kThrow | kLoad | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRegCField) \
- V(0x55, IGET_BOOLEAN, "iget-boolean", k22c, true, kFieldRef, kContinue | kThrow | kLoad | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRegCField) \
- V(0x56, IGET_BYTE, "iget-byte", k22c, true, kFieldRef, kContinue | kThrow | kLoad | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRegCField) \
- V(0x57, IGET_CHAR, "iget-char", k22c, true, kFieldRef, kContinue | kThrow | kLoad | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRegCField) \
- V(0x58, IGET_SHORT, "iget-short", k22c, true, kFieldRef, kContinue | kThrow | kLoad | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRegCField) \
- V(0x59, IPUT, "iput", k22c, false, kFieldRef, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRegCField) \
- V(0x5A, IPUT_WIDE, "iput-wide", k22c, false, kFieldRef, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegAWide | kVerifyRegB | kVerifyRegCField) \
- V(0x5B, IPUT_OBJECT, "iput-object", k22c, false, kFieldRef, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRegCField) \
- V(0x5C, IPUT_BOOLEAN, "iput-boolean", k22c, false, kFieldRef, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRegCField) \
- V(0x5D, IPUT_BYTE, "iput-byte", k22c, false, kFieldRef, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRegCField) \
- V(0x5E, IPUT_CHAR, "iput-char", k22c, false, kFieldRef, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRegCField) \
- V(0x5F, IPUT_SHORT, "iput-short", k22c, false, kFieldRef, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRegCField) \
- V(0x60, SGET, "sget", k21c, true, kFieldRef, kContinue | kThrow | kLoad | kRegBFieldOrConstant, kVerifyRegA | kVerifyRegBField) \
- V(0x61, SGET_WIDE, "sget-wide", k21c, true, kFieldRef, kContinue | kThrow | kLoad | kRegBFieldOrConstant, kVerifyRegAWide | kVerifyRegBField) \
- V(0x62, SGET_OBJECT, "sget-object", k21c, true, kFieldRef, kContinue | kThrow | kLoad | kRegBFieldOrConstant, kVerifyRegA | kVerifyRegBField) \
- V(0x63, SGET_BOOLEAN, "sget-boolean", k21c, true, kFieldRef, kContinue | kThrow | kLoad | kRegBFieldOrConstant, kVerifyRegA | kVerifyRegBField) \
- V(0x64, SGET_BYTE, "sget-byte", k21c, true, kFieldRef, kContinue | kThrow | kLoad | kRegBFieldOrConstant, kVerifyRegA | kVerifyRegBField) \
- V(0x65, SGET_CHAR, "sget-char", k21c, true, kFieldRef, kContinue | kThrow | kLoad | kRegBFieldOrConstant, kVerifyRegA | kVerifyRegBField) \
- V(0x66, SGET_SHORT, "sget-short", k21c, true, kFieldRef, kContinue | kThrow | kLoad | kRegBFieldOrConstant, kVerifyRegA | kVerifyRegBField) \
- V(0x67, SPUT, "sput", k21c, false, kFieldRef, kContinue | kThrow | kStore | kRegBFieldOrConstant, kVerifyRegA | kVerifyRegBField) \
- V(0x68, SPUT_WIDE, "sput-wide", k21c, false, kFieldRef, kContinue | kThrow | kStore | kRegBFieldOrConstant, kVerifyRegAWide | kVerifyRegBField) \
- V(0x69, SPUT_OBJECT, "sput-object", k21c, false, kFieldRef, kContinue | kThrow | kStore | kRegBFieldOrConstant, kVerifyRegA | kVerifyRegBField) \
- V(0x6A, SPUT_BOOLEAN, "sput-boolean", k21c, false, kFieldRef, kContinue | kThrow | kStore | kRegBFieldOrConstant, kVerifyRegA | kVerifyRegBField) \
- V(0x6B, SPUT_BYTE, "sput-byte", k21c, false, kFieldRef, kContinue | kThrow | kStore | kRegBFieldOrConstant, kVerifyRegA | kVerifyRegBField) \
- V(0x6C, SPUT_CHAR, "sput-char", k21c, false, kFieldRef, kContinue | kThrow | kStore | kRegBFieldOrConstant, kVerifyRegA | kVerifyRegBField) \
- V(0x6D, SPUT_SHORT, "sput-short", k21c, false, kFieldRef, kContinue | kThrow | kStore | kRegBFieldOrConstant, kVerifyRegA | kVerifyRegBField) \
- V(0x6E, INVOKE_VIRTUAL, "invoke-virtual", k35c, false, kMethodRef, kContinue | kThrow | kInvoke, kVerifyRegBMethod | kVerifyVarArgNonZero) \
- V(0x6F, INVOKE_SUPER, "invoke-super", k35c, false, kMethodRef, kContinue | kThrow | kInvoke, kVerifyRegBMethod | kVerifyVarArgNonZero) \
- V(0x70, INVOKE_DIRECT, "invoke-direct", k35c, false, kMethodRef, kContinue | kThrow | kInvoke, kVerifyRegBMethod | kVerifyVarArgNonZero) \
- V(0x71, INVOKE_STATIC, "invoke-static", k35c, false, kMethodRef, kContinue | kThrow | kInvoke, kVerifyRegBMethod | kVerifyVarArg) \
- V(0x72, INVOKE_INTERFACE, "invoke-interface", k35c, false, kMethodRef, kContinue | kThrow | kInvoke, kVerifyRegBMethod | kVerifyVarArgNonZero) \
- V(0x73, RETURN_VOID_NO_BARRIER, "return-void-no-barrier", k10x, false, kNone, kReturn, kVerifyNone) \
- V(0x74, INVOKE_VIRTUAL_RANGE, "invoke-virtual/range", k3rc, false, kMethodRef, kContinue | kThrow | kInvoke, kVerifyRegBMethod | kVerifyVarArgRangeNonZero) \
- V(0x75, INVOKE_SUPER_RANGE, "invoke-super/range", k3rc, false, kMethodRef, kContinue | kThrow | kInvoke, kVerifyRegBMethod | kVerifyVarArgRangeNonZero) \
- V(0x76, INVOKE_DIRECT_RANGE, "invoke-direct/range", k3rc, false, kMethodRef, kContinue | kThrow | kInvoke, kVerifyRegBMethod | kVerifyVarArgRangeNonZero) \
- V(0x77, INVOKE_STATIC_RANGE, "invoke-static/range", k3rc, false, kMethodRef, kContinue | kThrow | kInvoke, kVerifyRegBMethod | kVerifyVarArgRange) \
- V(0x78, INVOKE_INTERFACE_RANGE, "invoke-interface/range", k3rc, false, kMethodRef, kContinue | kThrow | kInvoke, kVerifyRegBMethod | kVerifyVarArgRangeNonZero) \
- V(0x79, UNUSED_79, "unused-79", k10x, false, kUnknown, 0, kVerifyError) \
- V(0x7A, UNUSED_7A, "unused-7a", k10x, false, kUnknown, 0, kVerifyError) \
- V(0x7B, NEG_INT, "neg-int", k12x, true, kNone, kContinue, kVerifyRegA | kVerifyRegB) \
- V(0x7C, NOT_INT, "not-int", k12x, true, kNone, kContinue, kVerifyRegA | kVerifyRegB) \
- V(0x7D, NEG_LONG, "neg-long", k12x, true, kNone, kContinue, kVerifyRegAWide | kVerifyRegBWide) \
- V(0x7E, NOT_LONG, "not-long", k12x, true, kNone, kContinue, kVerifyRegAWide | kVerifyRegBWide) \
- V(0x7F, NEG_FLOAT, "neg-float", k12x, true, kNone, kContinue, kVerifyRegA | kVerifyRegB) \
- V(0x80, NEG_DOUBLE, "neg-double", k12x, true, kNone, kContinue, kVerifyRegAWide | kVerifyRegBWide) \
- V(0x81, INT_TO_LONG, "int-to-long", k12x, true, kNone, kContinue | kCast, kVerifyRegAWide | kVerifyRegB) \
- V(0x82, INT_TO_FLOAT, "int-to-float", k12x, true, kNone, kContinue | kCast, kVerifyRegA | kVerifyRegB) \
- V(0x83, INT_TO_DOUBLE, "int-to-double", k12x, true, kNone, kContinue | kCast, kVerifyRegAWide | kVerifyRegB) \
- V(0x84, LONG_TO_INT, "long-to-int", k12x, true, kNone, kContinue | kCast, kVerifyRegA | kVerifyRegBWide) \
- V(0x85, LONG_TO_FLOAT, "long-to-float", k12x, true, kNone, kContinue | kCast, kVerifyRegA | kVerifyRegBWide) \
- V(0x86, LONG_TO_DOUBLE, "long-to-double", k12x, true, kNone, kContinue | kCast, kVerifyRegAWide | kVerifyRegBWide) \
- V(0x87, FLOAT_TO_INT, "float-to-int", k12x, true, kNone, kContinue | kCast, kVerifyRegA | kVerifyRegB) \
- V(0x88, FLOAT_TO_LONG, "float-to-long", k12x, true, kNone, kContinue | kCast, kVerifyRegAWide | kVerifyRegB) \
- V(0x89, FLOAT_TO_DOUBLE, "float-to-double", k12x, true, kNone, kContinue | kCast, kVerifyRegAWide | kVerifyRegB) \
- V(0x8A, DOUBLE_TO_INT, "double-to-int", k12x, true, kNone, kContinue | kCast, kVerifyRegA | kVerifyRegBWide) \
- V(0x8B, DOUBLE_TO_LONG, "double-to-long", k12x, true, kNone, kContinue | kCast, kVerifyRegAWide | kVerifyRegBWide) \
- V(0x8C, DOUBLE_TO_FLOAT, "double-to-float", k12x, true, kNone, kContinue | kCast, kVerifyRegA | kVerifyRegBWide) \
- V(0x8D, INT_TO_BYTE, "int-to-byte", k12x, true, kNone, kContinue | kCast, kVerifyRegA | kVerifyRegB) \
- V(0x8E, INT_TO_CHAR, "int-to-char", k12x, true, kNone, kContinue | kCast, kVerifyRegA | kVerifyRegB) \
- V(0x8F, INT_TO_SHORT, "int-to-short", k12x, true, kNone, kContinue | kCast, kVerifyRegA | kVerifyRegB) \
- V(0x90, ADD_INT, "add-int", k23x, true, kNone, kContinue | kAdd, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0x91, SUB_INT, "sub-int", k23x, true, kNone, kContinue | kSubtract, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0x92, MUL_INT, "mul-int", k23x, true, kNone, kContinue | kMultiply, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0x93, DIV_INT, "div-int", k23x, true, kNone, kContinue | kThrow | kDivide, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0x94, REM_INT, "rem-int", k23x, true, kNone, kContinue | kThrow | kRemainder, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0x95, AND_INT, "and-int", k23x, true, kNone, kContinue | kAnd, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0x96, OR_INT, "or-int", k23x, true, kNone, kContinue | kOr, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0x97, XOR_INT, "xor-int", k23x, true, kNone, kContinue | kXor, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0x98, SHL_INT, "shl-int", k23x, true, kNone, kContinue | kShl, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0x99, SHR_INT, "shr-int", k23x, true, kNone, kContinue | kShr, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0x9A, USHR_INT, "ushr-int", k23x, true, kNone, kContinue | kUshr, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0x9B, ADD_LONG, "add-long", k23x, true, kNone, kContinue | kAdd, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegCWide) \
- V(0x9C, SUB_LONG, "sub-long", k23x, true, kNone, kContinue | kSubtract, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegCWide) \
- V(0x9D, MUL_LONG, "mul-long", k23x, true, kNone, kContinue | kMultiply, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegCWide) \
- V(0x9E, DIV_LONG, "div-long", k23x, true, kNone, kContinue | kThrow | kDivide, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegCWide) \
- V(0x9F, REM_LONG, "rem-long", k23x, true, kNone, kContinue | kThrow | kRemainder, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegCWide) \
- V(0xA0, AND_LONG, "and-long", k23x, true, kNone, kContinue | kAnd, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegCWide) \
- V(0xA1, OR_LONG, "or-long", k23x, true, kNone, kContinue | kOr, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegCWide) \
- V(0xA2, XOR_LONG, "xor-long", k23x, true, kNone, kContinue | kXor, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegCWide) \
- V(0xA3, SHL_LONG, "shl-long", k23x, true, kNone, kContinue | kShl, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegC) \
- V(0xA4, SHR_LONG, "shr-long", k23x, true, kNone, kContinue | kShr, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegC) \
- V(0xA5, USHR_LONG, "ushr-long", k23x, true, kNone, kContinue | kUshr, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegC) \
- V(0xA6, ADD_FLOAT, "add-float", k23x, true, kNone, kContinue | kAdd, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0xA7, SUB_FLOAT, "sub-float", k23x, true, kNone, kContinue | kSubtract, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0xA8, MUL_FLOAT, "mul-float", k23x, true, kNone, kContinue | kMultiply, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0xA9, DIV_FLOAT, "div-float", k23x, true, kNone, kContinue | kDivide, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0xAA, REM_FLOAT, "rem-float", k23x, true, kNone, kContinue | kRemainder, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
- V(0xAB, ADD_DOUBLE, "add-double", k23x, true, kNone, kContinue | kAdd, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegCWide) \
- V(0xAC, SUB_DOUBLE, "sub-double", k23x, true, kNone, kContinue | kSubtract, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegCWide) \
- V(0xAD, MUL_DOUBLE, "mul-double", k23x, true, kNone, kContinue | kMultiply, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegCWide) \
- V(0xAE, DIV_DOUBLE, "div-double", k23x, true, kNone, kContinue | kDivide, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegCWide) \
- V(0xAF, REM_DOUBLE, "rem-double", k23x, true, kNone, kContinue | kRemainder, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegCWide) \
- V(0xB0, ADD_INT_2ADDR, "add-int/2addr", k12x, true, kNone, kContinue | kAdd, kVerifyRegA | kVerifyRegB) \
- V(0xB1, SUB_INT_2ADDR, "sub-int/2addr", k12x, true, kNone, kContinue | kSubtract, kVerifyRegA | kVerifyRegB) \
- V(0xB2, MUL_INT_2ADDR, "mul-int/2addr", k12x, true, kNone, kContinue | kMultiply, kVerifyRegA | kVerifyRegB) \
- V(0xB3, DIV_INT_2ADDR, "div-int/2addr", k12x, true, kNone, kContinue | kThrow | kDivide, kVerifyRegA | kVerifyRegB) \
- V(0xB4, REM_INT_2ADDR, "rem-int/2addr", k12x, true, kNone, kContinue | kThrow | kRemainder, kVerifyRegA | kVerifyRegB) \
- V(0xB5, AND_INT_2ADDR, "and-int/2addr", k12x, true, kNone, kContinue | kAnd, kVerifyRegA | kVerifyRegB) \
- V(0xB6, OR_INT_2ADDR, "or-int/2addr", k12x, true, kNone, kContinue | kOr, kVerifyRegA | kVerifyRegB) \
- V(0xB7, XOR_INT_2ADDR, "xor-int/2addr", k12x, true, kNone, kContinue | kXor, kVerifyRegA | kVerifyRegB) \
- V(0xB8, SHL_INT_2ADDR, "shl-int/2addr", k12x, true, kNone, kContinue | kShl, kVerifyRegA | kVerifyRegB) \
- V(0xB9, SHR_INT_2ADDR, "shr-int/2addr", k12x, true, kNone, kContinue | kShr, kVerifyRegA | kVerifyRegB) \
- V(0xBA, USHR_INT_2ADDR, "ushr-int/2addr", k12x, true, kNone, kContinue | kUshr, kVerifyRegA | kVerifyRegB) \
- V(0xBB, ADD_LONG_2ADDR, "add-long/2addr", k12x, true, kNone, kContinue | kAdd, kVerifyRegAWide | kVerifyRegBWide) \
- V(0xBC, SUB_LONG_2ADDR, "sub-long/2addr", k12x, true, kNone, kContinue | kSubtract, kVerifyRegAWide | kVerifyRegBWide) \
- V(0xBD, MUL_LONG_2ADDR, "mul-long/2addr", k12x, true, kNone, kContinue | kMultiply, kVerifyRegAWide | kVerifyRegBWide) \
- V(0xBE, DIV_LONG_2ADDR, "div-long/2addr", k12x, true, kNone, kContinue | kThrow | kDivide, kVerifyRegAWide | kVerifyRegBWide) \
- V(0xBF, REM_LONG_2ADDR, "rem-long/2addr", k12x, true, kNone, kContinue | kThrow | kRemainder, kVerifyRegAWide | kVerifyRegBWide) \
- V(0xC0, AND_LONG_2ADDR, "and-long/2addr", k12x, true, kNone, kContinue | kAnd, kVerifyRegAWide | kVerifyRegBWide) \
- V(0xC1, OR_LONG_2ADDR, "or-long/2addr", k12x, true, kNone, kContinue | kOr, kVerifyRegAWide | kVerifyRegBWide) \
- V(0xC2, XOR_LONG_2ADDR, "xor-long/2addr", k12x, true, kNone, kContinue | kXor, kVerifyRegAWide | kVerifyRegBWide) \
- V(0xC3, SHL_LONG_2ADDR, "shl-long/2addr", k12x, true, kNone, kContinue | kShl, kVerifyRegAWide | kVerifyRegB) \
- V(0xC4, SHR_LONG_2ADDR, "shr-long/2addr", k12x, true, kNone, kContinue | kShr, kVerifyRegAWide | kVerifyRegB) \
- V(0xC5, USHR_LONG_2ADDR, "ushr-long/2addr", k12x, true, kNone, kContinue | kUshr, kVerifyRegAWide | kVerifyRegB) \
- V(0xC6, ADD_FLOAT_2ADDR, "add-float/2addr", k12x, true, kNone, kContinue | kAdd, kVerifyRegA | kVerifyRegB) \
- V(0xC7, SUB_FLOAT_2ADDR, "sub-float/2addr", k12x, true, kNone, kContinue | kSubtract, kVerifyRegA | kVerifyRegB) \
- V(0xC8, MUL_FLOAT_2ADDR, "mul-float/2addr", k12x, true, kNone, kContinue | kMultiply, kVerifyRegA | kVerifyRegB) \
- V(0xC9, DIV_FLOAT_2ADDR, "div-float/2addr", k12x, true, kNone, kContinue | kDivide, kVerifyRegA | kVerifyRegB) \
- V(0xCA, REM_FLOAT_2ADDR, "rem-float/2addr", k12x, true, kNone, kContinue | kRemainder, kVerifyRegA | kVerifyRegB) \
- V(0xCB, ADD_DOUBLE_2ADDR, "add-double/2addr", k12x, true, kNone, kContinue | kAdd, kVerifyRegAWide | kVerifyRegBWide) \
- V(0xCC, SUB_DOUBLE_2ADDR, "sub-double/2addr", k12x, true, kNone, kContinue | kSubtract, kVerifyRegAWide | kVerifyRegBWide) \
- V(0xCD, MUL_DOUBLE_2ADDR, "mul-double/2addr", k12x, true, kNone, kContinue | kMultiply, kVerifyRegAWide | kVerifyRegBWide) \
- V(0xCE, DIV_DOUBLE_2ADDR, "div-double/2addr", k12x, true, kNone, kContinue | kDivide, kVerifyRegAWide | kVerifyRegBWide) \
- V(0xCF, REM_DOUBLE_2ADDR, "rem-double/2addr", k12x, true, kNone, kContinue | kRemainder, kVerifyRegAWide | kVerifyRegBWide) \
- V(0xD0, ADD_INT_LIT16, "add-int/lit16", k22s, true, kNone, kContinue | kAdd | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
- V(0xD1, RSUB_INT, "rsub-int", k22s, true, kNone, kContinue | kSubtract | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
- V(0xD2, MUL_INT_LIT16, "mul-int/lit16", k22s, true, kNone, kContinue | kMultiply | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
- V(0xD3, DIV_INT_LIT16, "div-int/lit16", k22s, true, kNone, kContinue | kThrow | kDivide | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
- V(0xD4, REM_INT_LIT16, "rem-int/lit16", k22s, true, kNone, kContinue | kThrow | kRemainder | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
- V(0xD5, AND_INT_LIT16, "and-int/lit16", k22s, true, kNone, kContinue | kAnd | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
- V(0xD6, OR_INT_LIT16, "or-int/lit16", k22s, true, kNone, kContinue | kOr | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
- V(0xD7, XOR_INT_LIT16, "xor-int/lit16", k22s, true, kNone, kContinue | kXor | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
- V(0xD8, ADD_INT_LIT8, "add-int/lit8", k22b, true, kNone, kContinue | kAdd | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
- V(0xD9, RSUB_INT_LIT8, "rsub-int/lit8", k22b, true, kNone, kContinue | kSubtract | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
- V(0xDA, MUL_INT_LIT8, "mul-int/lit8", k22b, true, kNone, kContinue | kMultiply | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
- V(0xDB, DIV_INT_LIT8, "div-int/lit8", k22b, true, kNone, kContinue | kThrow | kDivide | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
- V(0xDC, REM_INT_LIT8, "rem-int/lit8", k22b, true, kNone, kContinue | kThrow | kRemainder | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
- V(0xDD, AND_INT_LIT8, "and-int/lit8", k22b, true, kNone, kContinue | kAnd | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
- V(0xDE, OR_INT_LIT8, "or-int/lit8", k22b, true, kNone, kContinue | kOr | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
- V(0xDF, XOR_INT_LIT8, "xor-int/lit8", k22b, true, kNone, kContinue | kXor | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
- V(0xE0, SHL_INT_LIT8, "shl-int/lit8", k22b, true, kNone, kContinue | kShl | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
- V(0xE1, SHR_INT_LIT8, "shr-int/lit8", k22b, true, kNone, kContinue | kShr | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
- V(0xE2, USHR_INT_LIT8, "ushr-int/lit8", k22b, true, kNone, kContinue | kUshr | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
- V(0xE3, IGET_QUICK, "iget-quick", k22c, true, kFieldRef, kContinue | kThrow | kLoad | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRuntimeOnly) \
- V(0xE4, IGET_WIDE_QUICK, "iget-wide-quick", k22c, true, kFieldRef, kContinue | kThrow | kLoad | kRegCFieldOrConstant, kVerifyRegAWide | kVerifyRegB | kVerifyRuntimeOnly) \
- V(0xE5, IGET_OBJECT_QUICK, "iget-object-quick", k22c, true, kFieldRef, kContinue | kThrow | kLoad | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRuntimeOnly) \
- V(0xE6, IPUT_QUICK, "iput-quick", k22c, false, kFieldRef, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRuntimeOnly) \
- V(0xE7, IPUT_WIDE_QUICK, "iput-wide-quick", k22c, false, kFieldRef, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegAWide | kVerifyRegB | kVerifyRuntimeOnly) \
- V(0xE8, IPUT_OBJECT_QUICK, "iput-object-quick", k22c, false, kFieldRef, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRuntimeOnly) \
- V(0xE9, INVOKE_VIRTUAL_QUICK, "invoke-virtual-quick", k35c, false, kMethodRef, kContinue | kThrow | kInvoke, kVerifyVarArgNonZero | kVerifyRuntimeOnly) \
- V(0xEA, INVOKE_VIRTUAL_RANGE_QUICK, "invoke-virtual/range-quick", k3rc, false, kMethodRef, kContinue | kThrow | kInvoke, kVerifyVarArgRangeNonZero | kVerifyRuntimeOnly) \
- V(0xEB, IPUT_BOOLEAN_QUICK, "iput-boolean-quick", k22c, false, kFieldRef, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRuntimeOnly) \
- V(0xEC, IPUT_BYTE_QUICK, "iput-byte-quick", k22c, false, kFieldRef, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRuntimeOnly) \
- V(0xED, IPUT_CHAR_QUICK, "iput-char-quick", k22c, false, kFieldRef, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRuntimeOnly) \
- V(0xEE, IPUT_SHORT_QUICK, "iput-short-quick", k22c, false, kFieldRef, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRuntimeOnly) \
- V(0xEF, IGET_BOOLEAN_QUICK, "iget-boolean-quick", k22c, true, kFieldRef, kContinue | kThrow | kLoad | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRuntimeOnly) \
- V(0xF0, IGET_BYTE_QUICK, "iget-byte-quick", k22c, true, kFieldRef, kContinue | kThrow | kLoad | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRuntimeOnly) \
- V(0xF1, IGET_CHAR_QUICK, "iget-char-quick", k22c, true, kFieldRef, kContinue | kThrow | kLoad | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRuntimeOnly) \
- V(0xF2, IGET_SHORT_QUICK, "iget-short-quick", k22c, true, kFieldRef, kContinue | kThrow | kLoad | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRuntimeOnly) \
- V(0xF3, INVOKE_LAMBDA, "invoke-lambda", k25x, false, kNone, kContinue | kThrow | kInvoke | kExperimental, kVerifyRegC /*TODO: | kVerifyVarArg*/) \
- V(0xF4, UNUSED_F4, "unused-f4", k10x, false, kUnknown, 0, kVerifyError) \
- V(0xF5, UNUSED_F5, "unused-f5", k10x, false, kUnknown, 0, kVerifyError) \
+ V(0x00, NOP, "nop", k10x, false, kIndexNone, kContinue, kVerifyNone) \
+ V(0x01, MOVE, "move", k12x, true, kIndexNone, kContinue, kVerifyRegA | kVerifyRegB) \
+ V(0x02, MOVE_FROM16, "move/from16", k22x, true, kIndexNone, kContinue, kVerifyRegA | kVerifyRegB) \
+ V(0x03, MOVE_16, "move/16", k32x, true, kIndexNone, kContinue, kVerifyRegA | kVerifyRegB) \
+ V(0x04, MOVE_WIDE, "move-wide", k12x, true, kIndexNone, kContinue, kVerifyRegAWide | kVerifyRegBWide) \
+ V(0x05, MOVE_WIDE_FROM16, "move-wide/from16", k22x, true, kIndexNone, kContinue, kVerifyRegAWide | kVerifyRegBWide) \
+ V(0x06, MOVE_WIDE_16, "move-wide/16", k32x, true, kIndexNone, kContinue, kVerifyRegAWide | kVerifyRegBWide) \
+ V(0x07, MOVE_OBJECT, "move-object", k12x, true, kIndexNone, kContinue, kVerifyRegA | kVerifyRegB) \
+ V(0x08, MOVE_OBJECT_FROM16, "move-object/from16", k22x, true, kIndexNone, kContinue, kVerifyRegA | kVerifyRegB) \
+ V(0x09, MOVE_OBJECT_16, "move-object/16", k32x, true, kIndexNone, kContinue, kVerifyRegA | kVerifyRegB) \
+ V(0x0A, MOVE_RESULT, "move-result", k11x, true, kIndexNone, kContinue, kVerifyRegA) \
+ V(0x0B, MOVE_RESULT_WIDE, "move-result-wide", k11x, true, kIndexNone, kContinue, kVerifyRegAWide) \
+ V(0x0C, MOVE_RESULT_OBJECT, "move-result-object", k11x, true, kIndexNone, kContinue, kVerifyRegA) \
+ V(0x0D, MOVE_EXCEPTION, "move-exception", k11x, true, kIndexNone, kContinue, kVerifyRegA) \
+ V(0x0E, RETURN_VOID, "return-void", k10x, false, kIndexNone, kReturn, kVerifyNone) \
+ V(0x0F, RETURN, "return", k11x, false, kIndexNone, kReturn, kVerifyRegA) \
+ V(0x10, RETURN_WIDE, "return-wide", k11x, false, kIndexNone, kReturn, kVerifyRegAWide) \
+ V(0x11, RETURN_OBJECT, "return-object", k11x, false, kIndexNone, kReturn, kVerifyRegA) \
+ V(0x12, CONST_4, "const/4", k11n, true, kIndexNone, kContinue | kRegBFieldOrConstant, kVerifyRegA) \
+ V(0x13, CONST_16, "const/16", k21s, true, kIndexNone, kContinue | kRegBFieldOrConstant, kVerifyRegA) \
+ V(0x14, CONST, "const", k31i, true, kIndexNone, kContinue | kRegBFieldOrConstant, kVerifyRegA) \
+ V(0x15, CONST_HIGH16, "const/high16", k21h, true, kIndexNone, kContinue | kRegBFieldOrConstant, kVerifyRegA) \
+ V(0x16, CONST_WIDE_16, "const-wide/16", k21s, true, kIndexNone, kContinue | kRegBFieldOrConstant, kVerifyRegAWide) \
+ V(0x17, CONST_WIDE_32, "const-wide/32", k31i, true, kIndexNone, kContinue | kRegBFieldOrConstant, kVerifyRegAWide) \
+ V(0x18, CONST_WIDE, "const-wide", k51l, true, kIndexNone, kContinue | kRegBFieldOrConstant, kVerifyRegAWide) \
+ V(0x19, CONST_WIDE_HIGH16, "const-wide/high16", k21h, true, kIndexNone, kContinue | kRegBFieldOrConstant, kVerifyRegAWide) \
+ V(0x1A, CONST_STRING, "const-string", k21c, true, kIndexStringRef, kContinue | kThrow, kVerifyRegA | kVerifyRegBString) \
+ V(0x1B, CONST_STRING_JUMBO, "const-string/jumbo", k31c, true, kIndexStringRef, kContinue | kThrow, kVerifyRegA | kVerifyRegBString) \
+ V(0x1C, CONST_CLASS, "const-class", k21c, true, kIndexTypeRef, kContinue | kThrow, kVerifyRegA | kVerifyRegBType) \
+ V(0x1D, MONITOR_ENTER, "monitor-enter", k11x, false, kIndexNone, kContinue | kThrow | kClobber, kVerifyRegA) \
+ V(0x1E, MONITOR_EXIT, "monitor-exit", k11x, false, kIndexNone, kContinue | kThrow | kClobber, kVerifyRegA) \
+ V(0x1F, CHECK_CAST, "check-cast", k21c, true, kIndexTypeRef, kContinue | kThrow, kVerifyRegA | kVerifyRegBType) \
+ V(0x20, INSTANCE_OF, "instance-of", k22c, true, kIndexTypeRef, kContinue | kThrow, kVerifyRegA | kVerifyRegB | kVerifyRegCType) \
+ V(0x21, ARRAY_LENGTH, "array-length", k12x, true, kIndexNone, kContinue | kThrow, kVerifyRegA | kVerifyRegB) \
+ V(0x22, NEW_INSTANCE, "new-instance", k21c, true, kIndexTypeRef, kContinue | kThrow | kClobber, kVerifyRegA | kVerifyRegBNewInstance) \
+ V(0x23, NEW_ARRAY, "new-array", k22c, true, kIndexTypeRef, kContinue | kThrow | kClobber, kVerifyRegA | kVerifyRegB | kVerifyRegCNewArray) \
+ V(0x24, FILLED_NEW_ARRAY, "filled-new-array", k35c, false, kIndexTypeRef, kContinue | kThrow | kClobber, kVerifyRegBType | kVerifyVarArg) \
+ V(0x25, FILLED_NEW_ARRAY_RANGE, "filled-new-array/range", k3rc, false, kIndexTypeRef, kContinue | kThrow | kClobber, kVerifyRegBType | kVerifyVarArgRange) \
+ V(0x26, FILL_ARRAY_DATA, "fill-array-data", k31t, false, kIndexNone, kContinue | kThrow | kClobber, kVerifyRegA | kVerifyArrayData) \
+ V(0x27, THROW, "throw", k11x, false, kIndexNone, kThrow, kVerifyRegA) \
+ V(0x28, GOTO, "goto", k10t, false, kIndexNone, kBranch | kUnconditional, kVerifyBranchTarget) \
+ V(0x29, GOTO_16, "goto/16", k20t, false, kIndexNone, kBranch | kUnconditional, kVerifyBranchTarget) \
+ V(0x2A, GOTO_32, "goto/32", k30t, false, kIndexNone, kBranch | kUnconditional, kVerifyBranchTarget) \
+ V(0x2B, PACKED_SWITCH, "packed-switch", k31t, false, kIndexNone, kContinue | kSwitch, kVerifyRegA | kVerifySwitchTargets) \
+ V(0x2C, SPARSE_SWITCH, "sparse-switch", k31t, false, kIndexNone, kContinue | kSwitch, kVerifyRegA | kVerifySwitchTargets) \
+ V(0x2D, CMPL_FLOAT, "cmpl-float", k23x, true, kIndexNone, kContinue, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0x2E, CMPG_FLOAT, "cmpg-float", k23x, true, kIndexNone, kContinue, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0x2F, CMPL_DOUBLE, "cmpl-double", k23x, true, kIndexNone, kContinue, kVerifyRegA | kVerifyRegBWide | kVerifyRegCWide) \
+ V(0x30, CMPG_DOUBLE, "cmpg-double", k23x, true, kIndexNone, kContinue, kVerifyRegA | kVerifyRegBWide | kVerifyRegCWide) \
+ V(0x31, CMP_LONG, "cmp-long", k23x, true, kIndexNone, kContinue, kVerifyRegA | kVerifyRegBWide | kVerifyRegCWide) \
+ V(0x32, IF_EQ, "if-eq", k22t, false, kIndexNone, kContinue | kBranch, kVerifyRegA | kVerifyRegB | kVerifyBranchTarget) \
+ V(0x33, IF_NE, "if-ne", k22t, false, kIndexNone, kContinue | kBranch, kVerifyRegA | kVerifyRegB | kVerifyBranchTarget) \
+ V(0x34, IF_LT, "if-lt", k22t, false, kIndexNone, kContinue | kBranch, kVerifyRegA | kVerifyRegB | kVerifyBranchTarget) \
+ V(0x35, IF_GE, "if-ge", k22t, false, kIndexNone, kContinue | kBranch, kVerifyRegA | kVerifyRegB | kVerifyBranchTarget) \
+ V(0x36, IF_GT, "if-gt", k22t, false, kIndexNone, kContinue | kBranch, kVerifyRegA | kVerifyRegB | kVerifyBranchTarget) \
+ V(0x37, IF_LE, "if-le", k22t, false, kIndexNone, kContinue | kBranch, kVerifyRegA | kVerifyRegB | kVerifyBranchTarget) \
+ V(0x38, IF_EQZ, "if-eqz", k21t, false, kIndexNone, kContinue | kBranch, kVerifyRegA | kVerifyBranchTarget) \
+ V(0x39, IF_NEZ, "if-nez", k21t, false, kIndexNone, kContinue | kBranch, kVerifyRegA | kVerifyBranchTarget) \
+ V(0x3A, IF_LTZ, "if-ltz", k21t, false, kIndexNone, kContinue | kBranch, kVerifyRegA | kVerifyBranchTarget) \
+ V(0x3B, IF_GEZ, "if-gez", k21t, false, kIndexNone, kContinue | kBranch, kVerifyRegA | kVerifyBranchTarget) \
+ V(0x3C, IF_GTZ, "if-gtz", k21t, false, kIndexNone, kContinue | kBranch, kVerifyRegA | kVerifyBranchTarget) \
+ V(0x3D, IF_LEZ, "if-lez", k21t, false, kIndexNone, kContinue | kBranch, kVerifyRegA | kVerifyBranchTarget) \
+ V(0x3E, UNUSED_3E, "unused-3e", k10x, false, kIndexUnknown, 0, kVerifyError) \
+ V(0x3F, UNUSED_3F, "unused-3f", k10x, false, kIndexUnknown, 0, kVerifyError) \
+ V(0x40, UNUSED_40, "unused-40", k10x, false, kIndexUnknown, 0, kVerifyError) \
+ V(0x41, UNUSED_41, "unused-41", k10x, false, kIndexUnknown, 0, kVerifyError) \
+ V(0x42, UNUSED_42, "unused-42", k10x, false, kIndexUnknown, 0, kVerifyError) \
+ V(0x43, UNUSED_43, "unused-43", k10x, false, kIndexUnknown, 0, kVerifyError) \
+ V(0x44, AGET, "aget", k23x, true, kIndexNone, kContinue | kThrow | kLoad, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0x45, AGET_WIDE, "aget-wide", k23x, true, kIndexNone, kContinue | kThrow | kLoad, kVerifyRegAWide | kVerifyRegB | kVerifyRegC) \
+ V(0x46, AGET_OBJECT, "aget-object", k23x, true, kIndexNone, kContinue | kThrow | kLoad, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0x47, AGET_BOOLEAN, "aget-boolean", k23x, true, kIndexNone, kContinue | kThrow | kLoad, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0x48, AGET_BYTE, "aget-byte", k23x, true, kIndexNone, kContinue | kThrow | kLoad, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0x49, AGET_CHAR, "aget-char", k23x, true, kIndexNone, kContinue | kThrow | kLoad, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0x4A, AGET_SHORT, "aget-short", k23x, true, kIndexNone, kContinue | kThrow | kLoad, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0x4B, APUT, "aput", k23x, false, kIndexNone, kContinue | kThrow | kStore, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0x4C, APUT_WIDE, "aput-wide", k23x, false, kIndexNone, kContinue | kThrow | kStore, kVerifyRegAWide | kVerifyRegB | kVerifyRegC) \
+ V(0x4D, APUT_OBJECT, "aput-object", k23x, false, kIndexNone, kContinue | kThrow | kStore, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0x4E, APUT_BOOLEAN, "aput-boolean", k23x, false, kIndexNone, kContinue | kThrow | kStore, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0x4F, APUT_BYTE, "aput-byte", k23x, false, kIndexNone, kContinue | kThrow | kStore, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0x50, APUT_CHAR, "aput-char", k23x, false, kIndexNone, kContinue | kThrow | kStore, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0x51, APUT_SHORT, "aput-short", k23x, false, kIndexNone, kContinue | kThrow | kStore, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0x52, IGET, "iget", k22c, true, kIndexFieldRef, kContinue | kThrow | kLoad | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRegCField) \
+ V(0x53, IGET_WIDE, "iget-wide", k22c, true, kIndexFieldRef, kContinue | kThrow | kLoad | kRegCFieldOrConstant, kVerifyRegAWide | kVerifyRegB | kVerifyRegCField) \
+ V(0x54, IGET_OBJECT, "iget-object", k22c, true, kIndexFieldRef, kContinue | kThrow | kLoad | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRegCField) \
+ V(0x55, IGET_BOOLEAN, "iget-boolean", k22c, true, kIndexFieldRef, kContinue | kThrow | kLoad | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRegCField) \
+ V(0x56, IGET_BYTE, "iget-byte", k22c, true, kIndexFieldRef, kContinue | kThrow | kLoad | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRegCField) \
+ V(0x57, IGET_CHAR, "iget-char", k22c, true, kIndexFieldRef, kContinue | kThrow | kLoad | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRegCField) \
+ V(0x58, IGET_SHORT, "iget-short", k22c, true, kIndexFieldRef, kContinue | kThrow | kLoad | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRegCField) \
+ V(0x59, IPUT, "iput", k22c, false, kIndexFieldRef, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRegCField) \
+ V(0x5A, IPUT_WIDE, "iput-wide", k22c, false, kIndexFieldRef, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegAWide | kVerifyRegB | kVerifyRegCField) \
+ V(0x5B, IPUT_OBJECT, "iput-object", k22c, false, kIndexFieldRef, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRegCField) \
+ V(0x5C, IPUT_BOOLEAN, "iput-boolean", k22c, false, kIndexFieldRef, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRegCField) \
+ V(0x5D, IPUT_BYTE, "iput-byte", k22c, false, kIndexFieldRef, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRegCField) \
+ V(0x5E, IPUT_CHAR, "iput-char", k22c, false, kIndexFieldRef, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRegCField) \
+ V(0x5F, IPUT_SHORT, "iput-short", k22c, false, kIndexFieldRef, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRegCField) \
+ V(0x60, SGET, "sget", k21c, true, kIndexFieldRef, kContinue | kThrow | kLoad | kRegBFieldOrConstant, kVerifyRegA | kVerifyRegBField) \
+ V(0x61, SGET_WIDE, "sget-wide", k21c, true, kIndexFieldRef, kContinue | kThrow | kLoad | kRegBFieldOrConstant, kVerifyRegAWide | kVerifyRegBField) \
+ V(0x62, SGET_OBJECT, "sget-object", k21c, true, kIndexFieldRef, kContinue | kThrow | kLoad | kRegBFieldOrConstant, kVerifyRegA | kVerifyRegBField) \
+ V(0x63, SGET_BOOLEAN, "sget-boolean", k21c, true, kIndexFieldRef, kContinue | kThrow | kLoad | kRegBFieldOrConstant, kVerifyRegA | kVerifyRegBField) \
+ V(0x64, SGET_BYTE, "sget-byte", k21c, true, kIndexFieldRef, kContinue | kThrow | kLoad | kRegBFieldOrConstant, kVerifyRegA | kVerifyRegBField) \
+ V(0x65, SGET_CHAR, "sget-char", k21c, true, kIndexFieldRef, kContinue | kThrow | kLoad | kRegBFieldOrConstant, kVerifyRegA | kVerifyRegBField) \
+ V(0x66, SGET_SHORT, "sget-short", k21c, true, kIndexFieldRef, kContinue | kThrow | kLoad | kRegBFieldOrConstant, kVerifyRegA | kVerifyRegBField) \
+ V(0x67, SPUT, "sput", k21c, false, kIndexFieldRef, kContinue | kThrow | kStore | kRegBFieldOrConstant, kVerifyRegA | kVerifyRegBField) \
+ V(0x68, SPUT_WIDE, "sput-wide", k21c, false, kIndexFieldRef, kContinue | kThrow | kStore | kRegBFieldOrConstant, kVerifyRegAWide | kVerifyRegBField) \
+ V(0x69, SPUT_OBJECT, "sput-object", k21c, false, kIndexFieldRef, kContinue | kThrow | kStore | kRegBFieldOrConstant, kVerifyRegA | kVerifyRegBField) \
+ V(0x6A, SPUT_BOOLEAN, "sput-boolean", k21c, false, kIndexFieldRef, kContinue | kThrow | kStore | kRegBFieldOrConstant, kVerifyRegA | kVerifyRegBField) \
+ V(0x6B, SPUT_BYTE, "sput-byte", k21c, false, kIndexFieldRef, kContinue | kThrow | kStore | kRegBFieldOrConstant, kVerifyRegA | kVerifyRegBField) \
+ V(0x6C, SPUT_CHAR, "sput-char", k21c, false, kIndexFieldRef, kContinue | kThrow | kStore | kRegBFieldOrConstant, kVerifyRegA | kVerifyRegBField) \
+ V(0x6D, SPUT_SHORT, "sput-short", k21c, false, kIndexFieldRef, kContinue | kThrow | kStore | kRegBFieldOrConstant, kVerifyRegA | kVerifyRegBField) \
+ V(0x6E, INVOKE_VIRTUAL, "invoke-virtual", k35c, false, kIndexMethodRef, kContinue | kThrow | kInvoke, kVerifyRegBMethod | kVerifyVarArgNonZero) \
+ V(0x6F, INVOKE_SUPER, "invoke-super", k35c, false, kIndexMethodRef, kContinue | kThrow | kInvoke, kVerifyRegBMethod | kVerifyVarArgNonZero) \
+ V(0x70, INVOKE_DIRECT, "invoke-direct", k35c, false, kIndexMethodRef, kContinue | kThrow | kInvoke, kVerifyRegBMethod | kVerifyVarArgNonZero) \
+ V(0x71, INVOKE_STATIC, "invoke-static", k35c, false, kIndexMethodRef, kContinue | kThrow | kInvoke, kVerifyRegBMethod | kVerifyVarArg) \
+ V(0x72, INVOKE_INTERFACE, "invoke-interface", k35c, false, kIndexMethodRef, kContinue | kThrow | kInvoke, kVerifyRegBMethod | kVerifyVarArgNonZero) \
+ V(0x73, RETURN_VOID_NO_BARRIER, "return-void-no-barrier", k10x, false, kIndexNone, kReturn, kVerifyNone) \
+ V(0x74, INVOKE_VIRTUAL_RANGE, "invoke-virtual/range", k3rc, false, kIndexMethodRef, kContinue | kThrow | kInvoke, kVerifyRegBMethod | kVerifyVarArgRangeNonZero) \
+ V(0x75, INVOKE_SUPER_RANGE, "invoke-super/range", k3rc, false, kIndexMethodRef, kContinue | kThrow | kInvoke, kVerifyRegBMethod | kVerifyVarArgRangeNonZero) \
+ V(0x76, INVOKE_DIRECT_RANGE, "invoke-direct/range", k3rc, false, kIndexMethodRef, kContinue | kThrow | kInvoke, kVerifyRegBMethod | kVerifyVarArgRangeNonZero) \
+ V(0x77, INVOKE_STATIC_RANGE, "invoke-static/range", k3rc, false, kIndexMethodRef, kContinue | kThrow | kInvoke, kVerifyRegBMethod | kVerifyVarArgRange) \
+ V(0x78, INVOKE_INTERFACE_RANGE, "invoke-interface/range", k3rc, false, kIndexMethodRef, kContinue | kThrow | kInvoke, kVerifyRegBMethod | kVerifyVarArgRangeNonZero) \
+ V(0x79, UNUSED_79, "unused-79", k10x, false, kIndexUnknown, 0, kVerifyError) \
+ V(0x7A, UNUSED_7A, "unused-7a", k10x, false, kIndexUnknown, 0, kVerifyError) \
+ V(0x7B, NEG_INT, "neg-int", k12x, true, kIndexNone, kContinue, kVerifyRegA | kVerifyRegB) \
+ V(0x7C, NOT_INT, "not-int", k12x, true, kIndexNone, kContinue, kVerifyRegA | kVerifyRegB) \
+ V(0x7D, NEG_LONG, "neg-long", k12x, true, kIndexNone, kContinue, kVerifyRegAWide | kVerifyRegBWide) \
+ V(0x7E, NOT_LONG, "not-long", k12x, true, kIndexNone, kContinue, kVerifyRegAWide | kVerifyRegBWide) \
+ V(0x7F, NEG_FLOAT, "neg-float", k12x, true, kIndexNone, kContinue, kVerifyRegA | kVerifyRegB) \
+ V(0x80, NEG_DOUBLE, "neg-double", k12x, true, kIndexNone, kContinue, kVerifyRegAWide | kVerifyRegBWide) \
+ V(0x81, INT_TO_LONG, "int-to-long", k12x, true, kIndexNone, kContinue | kCast, kVerifyRegAWide | kVerifyRegB) \
+ V(0x82, INT_TO_FLOAT, "int-to-float", k12x, true, kIndexNone, kContinue | kCast, kVerifyRegA | kVerifyRegB) \
+ V(0x83, INT_TO_DOUBLE, "int-to-double", k12x, true, kIndexNone, kContinue | kCast, kVerifyRegAWide | kVerifyRegB) \
+ V(0x84, LONG_TO_INT, "long-to-int", k12x, true, kIndexNone, kContinue | kCast, kVerifyRegA | kVerifyRegBWide) \
+ V(0x85, LONG_TO_FLOAT, "long-to-float", k12x, true, kIndexNone, kContinue | kCast, kVerifyRegA | kVerifyRegBWide) \
+ V(0x86, LONG_TO_DOUBLE, "long-to-double", k12x, true, kIndexNone, kContinue | kCast, kVerifyRegAWide | kVerifyRegBWide) \
+ V(0x87, FLOAT_TO_INT, "float-to-int", k12x, true, kIndexNone, kContinue | kCast, kVerifyRegA | kVerifyRegB) \
+ V(0x88, FLOAT_TO_LONG, "float-to-long", k12x, true, kIndexNone, kContinue | kCast, kVerifyRegAWide | kVerifyRegB) \
+ V(0x89, FLOAT_TO_DOUBLE, "float-to-double", k12x, true, kIndexNone, kContinue | kCast, kVerifyRegAWide | kVerifyRegB) \
+ V(0x8A, DOUBLE_TO_INT, "double-to-int", k12x, true, kIndexNone, kContinue | kCast, kVerifyRegA | kVerifyRegBWide) \
+ V(0x8B, DOUBLE_TO_LONG, "double-to-long", k12x, true, kIndexNone, kContinue | kCast, kVerifyRegAWide | kVerifyRegBWide) \
+ V(0x8C, DOUBLE_TO_FLOAT, "double-to-float", k12x, true, kIndexNone, kContinue | kCast, kVerifyRegA | kVerifyRegBWide) \
+ V(0x8D, INT_TO_BYTE, "int-to-byte", k12x, true, kIndexNone, kContinue | kCast, kVerifyRegA | kVerifyRegB) \
+ V(0x8E, INT_TO_CHAR, "int-to-char", k12x, true, kIndexNone, kContinue | kCast, kVerifyRegA | kVerifyRegB) \
+ V(0x8F, INT_TO_SHORT, "int-to-short", k12x, true, kIndexNone, kContinue | kCast, kVerifyRegA | kVerifyRegB) \
+ V(0x90, ADD_INT, "add-int", k23x, true, kIndexNone, kContinue | kAdd, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0x91, SUB_INT, "sub-int", k23x, true, kIndexNone, kContinue | kSubtract, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0x92, MUL_INT, "mul-int", k23x, true, kIndexNone, kContinue | kMultiply, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0x93, DIV_INT, "div-int", k23x, true, kIndexNone, kContinue | kThrow | kDivide, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0x94, REM_INT, "rem-int", k23x, true, kIndexNone, kContinue | kThrow | kRemainder, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0x95, AND_INT, "and-int", k23x, true, kIndexNone, kContinue | kAnd, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0x96, OR_INT, "or-int", k23x, true, kIndexNone, kContinue | kOr, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0x97, XOR_INT, "xor-int", k23x, true, kIndexNone, kContinue | kXor, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0x98, SHL_INT, "shl-int", k23x, true, kIndexNone, kContinue | kShl, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0x99, SHR_INT, "shr-int", k23x, true, kIndexNone, kContinue | kShr, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0x9A, USHR_INT, "ushr-int", k23x, true, kIndexNone, kContinue | kUshr, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0x9B, ADD_LONG, "add-long", k23x, true, kIndexNone, kContinue | kAdd, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegCWide) \
+ V(0x9C, SUB_LONG, "sub-long", k23x, true, kIndexNone, kContinue | kSubtract, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegCWide) \
+ V(0x9D, MUL_LONG, "mul-long", k23x, true, kIndexNone, kContinue | kMultiply, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegCWide) \
+ V(0x9E, DIV_LONG, "div-long", k23x, true, kIndexNone, kContinue | kThrow | kDivide, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegCWide) \
+ V(0x9F, REM_LONG, "rem-long", k23x, true, kIndexNone, kContinue | kThrow | kRemainder, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegCWide) \
+ V(0xA0, AND_LONG, "and-long", k23x, true, kIndexNone, kContinue | kAnd, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegCWide) \
+ V(0xA1, OR_LONG, "or-long", k23x, true, kIndexNone, kContinue | kOr, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegCWide) \
+ V(0xA2, XOR_LONG, "xor-long", k23x, true, kIndexNone, kContinue | kXor, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegCWide) \
+ V(0xA3, SHL_LONG, "shl-long", k23x, true, kIndexNone, kContinue | kShl, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegC) \
+ V(0xA4, SHR_LONG, "shr-long", k23x, true, kIndexNone, kContinue | kShr, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegC) \
+ V(0xA5, USHR_LONG, "ushr-long", k23x, true, kIndexNone, kContinue | kUshr, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegC) \
+ V(0xA6, ADD_FLOAT, "add-float", k23x, true, kIndexNone, kContinue | kAdd, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0xA7, SUB_FLOAT, "sub-float", k23x, true, kIndexNone, kContinue | kSubtract, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0xA8, MUL_FLOAT, "mul-float", k23x, true, kIndexNone, kContinue | kMultiply, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0xA9, DIV_FLOAT, "div-float", k23x, true, kIndexNone, kContinue | kDivide, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0xAA, REM_FLOAT, "rem-float", k23x, true, kIndexNone, kContinue | kRemainder, kVerifyRegA | kVerifyRegB | kVerifyRegC) \
+ V(0xAB, ADD_DOUBLE, "add-double", k23x, true, kIndexNone, kContinue | kAdd, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegCWide) \
+ V(0xAC, SUB_DOUBLE, "sub-double", k23x, true, kIndexNone, kContinue | kSubtract, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegCWide) \
+ V(0xAD, MUL_DOUBLE, "mul-double", k23x, true, kIndexNone, kContinue | kMultiply, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegCWide) \
+ V(0xAE, DIV_DOUBLE, "div-double", k23x, true, kIndexNone, kContinue | kDivide, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegCWide) \
+ V(0xAF, REM_DOUBLE, "rem-double", k23x, true, kIndexNone, kContinue | kRemainder, kVerifyRegAWide | kVerifyRegBWide | kVerifyRegCWide) \
+ V(0xB0, ADD_INT_2ADDR, "add-int/2addr", k12x, true, kIndexNone, kContinue | kAdd, kVerifyRegA | kVerifyRegB) \
+ V(0xB1, SUB_INT_2ADDR, "sub-int/2addr", k12x, true, kIndexNone, kContinue | kSubtract, kVerifyRegA | kVerifyRegB) \
+ V(0xB2, MUL_INT_2ADDR, "mul-int/2addr", k12x, true, kIndexNone, kContinue | kMultiply, kVerifyRegA | kVerifyRegB) \
+ V(0xB3, DIV_INT_2ADDR, "div-int/2addr", k12x, true, kIndexNone, kContinue | kThrow | kDivide, kVerifyRegA | kVerifyRegB) \
+ V(0xB4, REM_INT_2ADDR, "rem-int/2addr", k12x, true, kIndexNone, kContinue | kThrow | kRemainder, kVerifyRegA | kVerifyRegB) \
+ V(0xB5, AND_INT_2ADDR, "and-int/2addr", k12x, true, kIndexNone, kContinue | kAnd, kVerifyRegA | kVerifyRegB) \
+ V(0xB6, OR_INT_2ADDR, "or-int/2addr", k12x, true, kIndexNone, kContinue | kOr, kVerifyRegA | kVerifyRegB) \
+ V(0xB7, XOR_INT_2ADDR, "xor-int/2addr", k12x, true, kIndexNone, kContinue | kXor, kVerifyRegA | kVerifyRegB) \
+ V(0xB8, SHL_INT_2ADDR, "shl-int/2addr", k12x, true, kIndexNone, kContinue | kShl, kVerifyRegA | kVerifyRegB) \
+ V(0xB9, SHR_INT_2ADDR, "shr-int/2addr", k12x, true, kIndexNone, kContinue | kShr, kVerifyRegA | kVerifyRegB) \
+ V(0xBA, USHR_INT_2ADDR, "ushr-int/2addr", k12x, true, kIndexNone, kContinue | kUshr, kVerifyRegA | kVerifyRegB) \
+ V(0xBB, ADD_LONG_2ADDR, "add-long/2addr", k12x, true, kIndexNone, kContinue | kAdd, kVerifyRegAWide | kVerifyRegBWide) \
+ V(0xBC, SUB_LONG_2ADDR, "sub-long/2addr", k12x, true, kIndexNone, kContinue | kSubtract, kVerifyRegAWide | kVerifyRegBWide) \
+ V(0xBD, MUL_LONG_2ADDR, "mul-long/2addr", k12x, true, kIndexNone, kContinue | kMultiply, kVerifyRegAWide | kVerifyRegBWide) \
+ V(0xBE, DIV_LONG_2ADDR, "div-long/2addr", k12x, true, kIndexNone, kContinue | kThrow | kDivide, kVerifyRegAWide | kVerifyRegBWide) \
+ V(0xBF, REM_LONG_2ADDR, "rem-long/2addr", k12x, true, kIndexNone, kContinue | kThrow | kRemainder, kVerifyRegAWide | kVerifyRegBWide) \
+ V(0xC0, AND_LONG_2ADDR, "and-long/2addr", k12x, true, kIndexNone, kContinue | kAnd, kVerifyRegAWide | kVerifyRegBWide) \
+ V(0xC1, OR_LONG_2ADDR, "or-long/2addr", k12x, true, kIndexNone, kContinue | kOr, kVerifyRegAWide | kVerifyRegBWide) \
+ V(0xC2, XOR_LONG_2ADDR, "xor-long/2addr", k12x, true, kIndexNone, kContinue | kXor, kVerifyRegAWide | kVerifyRegBWide) \
+ V(0xC3, SHL_LONG_2ADDR, "shl-long/2addr", k12x, true, kIndexNone, kContinue | kShl, kVerifyRegAWide | kVerifyRegB) \
+ V(0xC4, SHR_LONG_2ADDR, "shr-long/2addr", k12x, true, kIndexNone, kContinue | kShr, kVerifyRegAWide | kVerifyRegB) \
+ V(0xC5, USHR_LONG_2ADDR, "ushr-long/2addr", k12x, true, kIndexNone, kContinue | kUshr, kVerifyRegAWide | kVerifyRegB) \
+ V(0xC6, ADD_FLOAT_2ADDR, "add-float/2addr", k12x, true, kIndexNone, kContinue | kAdd, kVerifyRegA | kVerifyRegB) \
+ V(0xC7, SUB_FLOAT_2ADDR, "sub-float/2addr", k12x, true, kIndexNone, kContinue | kSubtract, kVerifyRegA | kVerifyRegB) \
+ V(0xC8, MUL_FLOAT_2ADDR, "mul-float/2addr", k12x, true, kIndexNone, kContinue | kMultiply, kVerifyRegA | kVerifyRegB) \
+ V(0xC9, DIV_FLOAT_2ADDR, "div-float/2addr", k12x, true, kIndexNone, kContinue | kDivide, kVerifyRegA | kVerifyRegB) \
+ V(0xCA, REM_FLOAT_2ADDR, "rem-float/2addr", k12x, true, kIndexNone, kContinue | kRemainder, kVerifyRegA | kVerifyRegB) \
+ V(0xCB, ADD_DOUBLE_2ADDR, "add-double/2addr", k12x, true, kIndexNone, kContinue | kAdd, kVerifyRegAWide | kVerifyRegBWide) \
+ V(0xCC, SUB_DOUBLE_2ADDR, "sub-double/2addr", k12x, true, kIndexNone, kContinue | kSubtract, kVerifyRegAWide | kVerifyRegBWide) \
+ V(0xCD, MUL_DOUBLE_2ADDR, "mul-double/2addr", k12x, true, kIndexNone, kContinue | kMultiply, kVerifyRegAWide | kVerifyRegBWide) \
+ V(0xCE, DIV_DOUBLE_2ADDR, "div-double/2addr", k12x, true, kIndexNone, kContinue | kDivide, kVerifyRegAWide | kVerifyRegBWide) \
+ V(0xCF, REM_DOUBLE_2ADDR, "rem-double/2addr", k12x, true, kIndexNone, kContinue | kRemainder, kVerifyRegAWide | kVerifyRegBWide) \
+ V(0xD0, ADD_INT_LIT16, "add-int/lit16", k22s, true, kIndexNone, kContinue | kAdd | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
+ V(0xD1, RSUB_INT, "rsub-int", k22s, true, kIndexNone, kContinue | kSubtract | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
+ V(0xD2, MUL_INT_LIT16, "mul-int/lit16", k22s, true, kIndexNone, kContinue | kMultiply | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
+ V(0xD3, DIV_INT_LIT16, "div-int/lit16", k22s, true, kIndexNone, kContinue | kThrow | kDivide | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
+ V(0xD4, REM_INT_LIT16, "rem-int/lit16", k22s, true, kIndexNone, kContinue | kThrow | kRemainder | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
+ V(0xD5, AND_INT_LIT16, "and-int/lit16", k22s, true, kIndexNone, kContinue | kAnd | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
+ V(0xD6, OR_INT_LIT16, "or-int/lit16", k22s, true, kIndexNone, kContinue | kOr | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
+ V(0xD7, XOR_INT_LIT16, "xor-int/lit16", k22s, true, kIndexNone, kContinue | kXor | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
+ V(0xD8, ADD_INT_LIT8, "add-int/lit8", k22b, true, kIndexNone, kContinue | kAdd | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
+ V(0xD9, RSUB_INT_LIT8, "rsub-int/lit8", k22b, true, kIndexNone, kContinue | kSubtract | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
+ V(0xDA, MUL_INT_LIT8, "mul-int/lit8", k22b, true, kIndexNone, kContinue | kMultiply | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
+ V(0xDB, DIV_INT_LIT8, "div-int/lit8", k22b, true, kIndexNone, kContinue | kThrow | kDivide | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
+ V(0xDC, REM_INT_LIT8, "rem-int/lit8", k22b, true, kIndexNone, kContinue | kThrow | kRemainder | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
+ V(0xDD, AND_INT_LIT8, "and-int/lit8", k22b, true, kIndexNone, kContinue | kAnd | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
+ V(0xDE, OR_INT_LIT8, "or-int/lit8", k22b, true, kIndexNone, kContinue | kOr | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
+ V(0xDF, XOR_INT_LIT8, "xor-int/lit8", k22b, true, kIndexNone, kContinue | kXor | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
+ V(0xE0, SHL_INT_LIT8, "shl-int/lit8", k22b, true, kIndexNone, kContinue | kShl | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
+ V(0xE1, SHR_INT_LIT8, "shr-int/lit8", k22b, true, kIndexNone, kContinue | kShr | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
+ V(0xE2, USHR_INT_LIT8, "ushr-int/lit8", k22b, true, kIndexNone, kContinue | kUshr | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB) \
+ V(0xE3, IGET_QUICK, "iget-quick", k22c, true, kIndexFieldOffset, kContinue | kThrow | kLoad | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRuntimeOnly) \
+ V(0xE4, IGET_WIDE_QUICK, "iget-wide-quick", k22c, true, kIndexFieldOffset, kContinue | kThrow | kLoad | kRegCFieldOrConstant, kVerifyRegAWide | kVerifyRegB | kVerifyRuntimeOnly) \
+ V(0xE5, IGET_OBJECT_QUICK, "iget-object-quick", k22c, true, kIndexFieldOffset, kContinue | kThrow | kLoad | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRuntimeOnly) \
+ V(0xE6, IPUT_QUICK, "iput-quick", k22c, false, kIndexFieldOffset, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRuntimeOnly) \
+ V(0xE7, IPUT_WIDE_QUICK, "iput-wide-quick", k22c, false, kIndexFieldOffset, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegAWide | kVerifyRegB | kVerifyRuntimeOnly) \
+ V(0xE8, IPUT_OBJECT_QUICK, "iput-object-quick", k22c, false, kIndexFieldOffset, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRuntimeOnly) \
+ V(0xE9, INVOKE_VIRTUAL_QUICK, "invoke-virtual-quick", k35c, false, kIndexVtableOffset, kContinue | kThrow | kInvoke, kVerifyVarArgNonZero | kVerifyRuntimeOnly) \
+ V(0xEA, INVOKE_VIRTUAL_RANGE_QUICK, "invoke-virtual/range-quick", k3rc, false, kIndexVtableOffset, kContinue | kThrow | kInvoke, kVerifyVarArgRangeNonZero | kVerifyRuntimeOnly) \
+ V(0xEB, IPUT_BOOLEAN_QUICK, "iput-boolean-quick", k22c, false, kIndexFieldOffset, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRuntimeOnly) \
+ V(0xEC, IPUT_BYTE_QUICK, "iput-byte-quick", k22c, false, kIndexFieldOffset, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRuntimeOnly) \
+ V(0xED, IPUT_CHAR_QUICK, "iput-char-quick", k22c, false, kIndexFieldOffset, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRuntimeOnly) \
+ V(0xEE, IPUT_SHORT_QUICK, "iput-short-quick", k22c, false, kIndexFieldOffset, kContinue | kThrow | kStore | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRuntimeOnly) \
+ V(0xEF, IGET_BOOLEAN_QUICK, "iget-boolean-quick", k22c, true, kIndexFieldOffset, kContinue | kThrow | kLoad | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRuntimeOnly) \
+ V(0xF0, IGET_BYTE_QUICK, "iget-byte-quick", k22c, true, kIndexFieldOffset, kContinue | kThrow | kLoad | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRuntimeOnly) \
+ V(0xF1, IGET_CHAR_QUICK, "iget-char-quick", k22c, true, kIndexFieldOffset, kContinue | kThrow | kLoad | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRuntimeOnly) \
+ V(0xF2, IGET_SHORT_QUICK, "iget-short-quick", k22c, true, kIndexFieldOffset, kContinue | kThrow | kLoad | kRegCFieldOrConstant, kVerifyRegA | kVerifyRegB | kVerifyRuntimeOnly) \
+ V(0xF3, INVOKE_LAMBDA, "invoke-lambda", k25x, false, kIndexNone, kContinue | kThrow | kInvoke | kExperimental, kVerifyRegC /*TODO: | kVerifyVarArg*/) \
+ V(0xF4, UNUSED_F4, "unused-f4", k10x, false, kIndexUnknown, 0, kVerifyError) \
+ V(0xF5, UNUSED_F5, "unused-f5", k10x, false, kIndexUnknown, 0, kVerifyError) \
/* TODO(iam): get rid of the unused 'false' column */ \
- V(0xF6, CREATE_LAMBDA, "create-lambda", k21c, false_UNUSED, kMethodRef, kContinue | kThrow | kExperimental, kVerifyRegA | kVerifyRegBMethod) \
- V(0xF7, UNUSED_F7, "unused-f7", k10x, false, kUnknown, 0, kVerifyError) \
- V(0xF8, BOX_LAMBDA, "box-lambda", k22x, true, kNone, kContinue | kExperimental, kVerifyRegA | kVerifyRegB) \
- V(0xF9, UNBOX_LAMBDA, "unbox-lambda", k22c, true, kTypeRef, kContinue | kThrow | kExperimental, kVerifyRegA | kVerifyRegB | kVerifyRegCType) \
- V(0xFA, UNUSED_FA, "unused-fa", k10x, false, kUnknown, 0, kVerifyError) \
- V(0xFB, UNUSED_FB, "unused-fb", k10x, false, kUnknown, 0, kVerifyError) \
- V(0xFC, UNUSED_FC, "unused-fc", k10x, false, kUnknown, 0, kVerifyError) \
- V(0xFD, UNUSED_FD, "unused-fd", k10x, false, kUnknown, 0, kVerifyError) \
- V(0xFE, UNUSED_FE, "unused-fe", k10x, false, kUnknown, 0, kVerifyError) \
- V(0xFF, UNUSED_FF, "unused-ff", k10x, false, kUnknown, 0, kVerifyError)
+ V(0xF6, CREATE_LAMBDA, "create-lambda", k21c, false_UNUSED, kIndexMethodRef, kContinue | kThrow | kExperimental, kVerifyRegA | kVerifyRegBMethod) \
+ V(0xF7, UNUSED_F7, "unused-f7", k10x, false, kIndexUnknown, 0, kVerifyError) \
+ V(0xF8, BOX_LAMBDA, "box-lambda", k22x, true, kIndexNone, kContinue | kExperimental, kVerifyRegA | kVerifyRegB) \
+ V(0xF9, UNBOX_LAMBDA, "unbox-lambda", k22c, true, kIndexTypeRef, kContinue | kThrow | kExperimental, kVerifyRegA | kVerifyRegB | kVerifyRegCType) \
+ V(0xFA, UNUSED_FA, "unused-fa", k10x, false, kIndexUnknown, 0, kVerifyError) \
+ V(0xFB, UNUSED_FB, "unused-fb", k10x, false, kIndexUnknown, 0, kVerifyError) \
+ V(0xFC, UNUSED_FC, "unused-fc", k10x, false, kIndexUnknown, 0, kVerifyError) \
+ V(0xFD, UNUSED_FD, "unused-fd", k10x, false, kIndexUnknown, 0, kVerifyError) \
+ V(0xFE, UNUSED_FE, "unused-fe", k10x, false, kIndexUnknown, 0, kVerifyError) \
+ V(0xFF, UNUSED_FF, "unused-ff", k10x, false, kIndexUnknown, 0, kVerifyError)
#define DEX_INSTRUCTION_FORMAT_LIST(V) \
V(k10x) \
diff --git a/runtime/dex_instruction_test.cc b/runtime/dex_instruction_test.cc
new file mode 100644
index 0000000..671ac0e
--- /dev/null
+++ b/runtime/dex_instruction_test.cc
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "dex_instruction-inl.h"
+#include "gtest/gtest.h"
+
+namespace art {
+
+TEST(StaticGetters, PropertiesOfNopTest) {
+ Instruction::Code nop = Instruction::NOP;
+ EXPECT_STREQ("nop", Instruction::Name(nop));
+ EXPECT_EQ(Instruction::k10x, Instruction::FormatOf(nop));
+ EXPECT_EQ(Instruction::kIndexNone, Instruction::IndexTypeOf(nop));
+ EXPECT_EQ(Instruction::kContinue, Instruction::FlagsOf(nop));
+ EXPECT_EQ(Instruction::kVerifyNone, Instruction::VerifyFlagsOf(nop));
+}
+
+} // namespace art
diff --git a/runtime/reflection_test.cc b/runtime/reflection_test.cc
index 6f17e7d..9707fb8 100644
--- a/runtime/reflection_test.cc
+++ b/runtime/reflection_test.cc
@@ -505,6 +505,7 @@
};
TEST_F(ReflectionTest, StaticMainMethod) {
+ TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING_WITH_QUICK();
ScopedObjectAccess soa(Thread::Current());
jobject jclass_loader = LoadDex("Main");
StackHandleScope<1> hs(soa.Self());
diff --git a/runtime/runtime_options.def b/runtime/runtime_options.def
index d5f7f97..dc4c0c7 100644
--- a/runtime/runtime_options.def
+++ b/runtime/runtime_options.def
@@ -79,10 +79,10 @@
RUNTIME_OPTIONS_KEY (bool, Relocate, kDefaultMustRelocate)
RUNTIME_OPTIONS_KEY (bool, Dex2Oat, true)
RUNTIME_OPTIONS_KEY (bool, ImageDex2Oat, true)
- // kPoisonHeapReferences currently works with
+ // kUseReadBarrier currently works with
// the interpreter only.
// TODO: make it work with the compiler.
-RUNTIME_OPTIONS_KEY (bool, Interpret, (kPoisonHeapReferences || kUseReadBarrier)) // -Xint
+RUNTIME_OPTIONS_KEY (bool, Interpret, kUseReadBarrier) // -Xint
// Disable the compiler for CC (for now).
RUNTIME_OPTIONS_KEY (XGcOption, GcOption) // -Xgc:
RUNTIME_OPTIONS_KEY (gc::space::LargeObjectSpaceType, \
diff --git a/test/099-vmdebug/src/Main.java b/test/099-vmdebug/src/Main.java
index a8db069..add2ff6 100644
--- a/test/099-vmdebug/src/Main.java
+++ b/test/099-vmdebug/src/Main.java
@@ -20,6 +20,9 @@
import java.util.Map;
public class Main {
+ private static final String TEMP_FILE_NAME_PREFIX = "test";
+ private static final String TEMP_FILE_NAME_SUFFIX = ".trace";
+
public static void main(String[] args) throws Exception {
String name = System.getProperty("java.vm.name");
if (!"Dalvik".equals(name)) {
@@ -32,21 +35,31 @@
private static File createTempFile() throws Exception {
try {
- return File.createTempFile("test", ".trace");
+ return File.createTempFile(TEMP_FILE_NAME_PREFIX, TEMP_FILE_NAME_SUFFIX);
} catch (IOException e) {
System.setProperty("java.io.tmpdir", "/data/local/tmp");
try {
- return File.createTempFile("test", ".trace");
+ return File.createTempFile(TEMP_FILE_NAME_PREFIX, TEMP_FILE_NAME_SUFFIX);
} catch (IOException e2) {
System.setProperty("java.io.tmpdir", "/sdcard");
- return File.createTempFile("test", ".trace");
+ return File.createTempFile(TEMP_FILE_NAME_PREFIX, TEMP_FILE_NAME_SUFFIX);
}
}
}
private static void testMethodTracing() throws Exception {
- File tempFile = createTempFile();
- tempFile.deleteOnExit();
+ File tempFile = null;
+ try {
+ tempFile = createTempFile();
+ testMethodTracingToFile(tempFile);
+ } finally {
+ if (tempFile != null) {
+ tempFile.delete();
+ }
+ }
+ }
+
+ private static void testMethodTracingToFile(File tempFile) throws Exception {
String tempFileName = tempFile.getPath();
if (VMDebug.getMethodTracingMode() != 0) {
diff --git a/test/510-checker-try-catch/smali/Builder.smali b/test/510-checker-try-catch/smali/Builder.smali
index f300b21..95708a2 100644
--- a/test/510-checker-try-catch/smali/Builder.smali
+++ b/test/510-checker-try-catch/smali/Builder.smali
@@ -68,25 +68,25 @@
## CHECK: predecessors "B0"
## CHECK: successors "<<BTry1>>"
## CHECK: xhandlers "<<BCatch1>>" "<<BCatch3>>"
-## CHECK: TryBoundary is_entry:true is_exit:false
+## CHECK: TryBoundary kind:entry
## CHECK: name "<<BExitTry1>>"
## CHECK: predecessors "<<BTry1>>"
## CHECK: successors "<<BAdd>>"
## CHECK: xhandlers "<<BCatch1>>" "<<BCatch3>>"
-## CHECK: TryBoundary is_entry:false is_exit:true
+## CHECK: TryBoundary kind:exit
## CHECK: name "<<BEnterTry2>>"
## CHECK: predecessors "<<BAdd>>"
## CHECK: successors "<<BTry2>>"
## CHECK: xhandlers "<<BCatch2>>" "<<BCatch3>>"
-## CHECK: TryBoundary is_entry:true is_exit:false
+## CHECK: TryBoundary kind:entry
## CHECK: name "<<BExitTry2>>"
## CHECK: predecessors "<<BTry2>>"
## CHECK: successors "<<BReturn>>"
## CHECK: xhandlers "<<BCatch2>>" "<<BCatch3>>"
-## CHECK: TryBoundary is_entry:false is_exit:true
+## CHECK: TryBoundary kind:exit
.method public static testMultipleTryCatch(III)I
.registers 3
@@ -164,19 +164,19 @@
## CHECK: predecessors "<<BThen>>"
## CHECK: successors "<<BTry1>>"
## CHECK: xhandlers "<<BCatch>>"
-## CHECK: TryBoundary is_entry:true is_exit:false
+## CHECK: TryBoundary kind:entry
## CHECK: name "<<BEnterTry2>>"
## CHECK: predecessors "<<BIf>>"
## CHECK: successors "<<BTry2>>"
## CHECK: xhandlers "<<BCatch>>"
-## CHECK: TryBoundary is_entry:true is_exit:false
+## CHECK: TryBoundary kind:entry
## CHECK: name "<<BExitTry>>"
## CHECK: predecessors "<<BTry2>>"
## CHECK: successors "<<BReturn>>"
## CHECK: xhandlers "<<BCatch>>"
-## CHECK: TryBoundary is_entry:false is_exit:true
+## CHECK: TryBoundary kind:exit
.method public static testMultipleEntries(IIII)I
.registers 4
@@ -237,19 +237,19 @@
## CHECK: predecessors "B0"
## CHECK: successors "<<BTry>>"
## CHECK: xhandlers "<<BCatch>>"
-## CHECK: TryBoundary is_entry:true is_exit:false
+## CHECK: TryBoundary kind:entry
## CHECK: name "<<BExitTry1>>"
## CHECK: predecessors "<<BTry>>"
## CHECK: successors "<<BThen>>"
## CHECK: xhandlers "<<BCatch>>"
-## CHECK: TryBoundary is_entry:false is_exit:true
+## CHECK: TryBoundary kind:exit
## CHECK: name "<<BExitTry2>>"
## CHECK: predecessors "<<BTry>>"
## CHECK: successors "<<BReturn>>"
## CHECK: xhandlers "<<BCatch>>"
-## CHECK: TryBoundary is_entry:false is_exit:true
+## CHECK: TryBoundary kind:exit
.method public static testMultipleExits(II)I
.registers 2
@@ -284,11 +284,11 @@
## CHECK: name "<<BTry1:B\d+>>"
## CHECK: predecessors "<<BEnter1>>"
-## CHECK: successors "<<BExit1Enter2:B\d+>>"
+## CHECK: successors "<<BExit1:B\d+>>"
## CHECK: Div
## CHECK: name "<<BTry2:B\d+>>"
-## CHECK: predecessors "<<BExit1Enter2>>"
+## CHECK: predecessors "<<BEnter2:B\d+>>"
## CHECK: successors "<<BExit2:B\d+>>"
## CHECK: Div
@@ -297,13 +297,13 @@
## CHECK: Return
## CHECK: name "<<BCatch1>>"
-## CHECK: predecessors "<<BEnter1>>" "<<BExit1Enter2>>"
+## CHECK: predecessors "<<BEnter1>>" "<<BExit1>>"
## CHECK: successors "<<BReturn>>"
## CHECK: flags "catch_block"
## CHECK: StoreLocal [v0,<<Minus1>>]
## CHECK: name "<<BCatch2>>"
-## CHECK: predecessors "<<BExit1Enter2>>" "<<BExit2>>"
+## CHECK: predecessors "<<BEnter2>>" "<<BExit2>>"
## CHECK: successors "<<BReturn>>"
## CHECK: flags "catch_block"
## CHECK: StoreLocal [v0,<<Minus2>>]
@@ -312,19 +312,25 @@
## CHECK: predecessors "B0"
## CHECK: successors "<<BTry1>>"
## CHECK: xhandlers "<<BCatch1>>"
-## CHECK: TryBoundary is_entry:true is_exit:false
+## CHECK: TryBoundary kind:entry
-## CHECK: name "<<BExit1Enter2>>"
+## CHECK: name "<<BExit1>>"
## CHECK: predecessors "<<BTry1>>"
+## CHECK: successors "<<BEnter2>>"
+## CHECK: xhandlers "<<BCatch1>>"
+## CHECK: TryBoundary kind:exit
+
+## CHECK: name "<<BEnter2>>"
+## CHECK: predecessors "<<BExit1>>"
## CHECK: successors "<<BTry2>>"
-## CHECK: xhandlers "<<BCatch1>>" "<<BCatch2>>"
-## CHECK: TryBoundary is_entry:true is_exit:true
+## CHECK: xhandlers "<<BCatch2>>"
+## CHECK: TryBoundary kind:entry
## CHECK: name "<<BExit2>>"
## CHECK: predecessors "<<BTry2>>"
## CHECK: successors "<<BReturn>>"
## CHECK: xhandlers "<<BCatch2>>"
-## CHECK: TryBoundary is_entry:false is_exit:true
+## CHECK: TryBoundary kind:exit
.method public static testSharedBoundary(III)I
.registers 3
@@ -365,13 +371,13 @@
## CHECK: Goto
## CHECK: name "<<BTry1:B\d+>>"
-## CHECK: predecessors "<<BExit2Enter1:B\d+>>"
+## CHECK: predecessors "<<BEnter1:B\d+>>"
## CHECK: successors "<<BExit1:B\d+>>"
## CHECK: Div
## CHECK: name "<<BTry2:B\d+>>"
## CHECK: predecessors "<<BEnter2>>"
-## CHECK: successors "<<BExit2Enter1>>"
+## CHECK: successors "<<BExit2:B\d+>>"
## CHECK: Div
## CHECK: name "<<BReturn:B\d+>>"
@@ -379,34 +385,40 @@
## CHECK: Return
## CHECK: name "<<BCatch1>>"
-## CHECK: predecessors "<<BExit2Enter1>>" "<<BExit1>>"
+## CHECK: predecessors "<<BEnter1>>" "<<BExit1>>"
## CHECK: successors "<<BReturn>>"
## CHECK: flags "catch_block"
## CHECK: StoreLocal [v0,<<Minus1>>]
## CHECK: name "<<BCatch2>>"
-## CHECK: predecessors "<<BEnter2>>" "<<BExit2Enter1>>"
+## CHECK: predecessors "<<BEnter2>>" "<<BExit2>>"
## CHECK: successors "<<BReturn>>"
## CHECK: flags "catch_block"
## CHECK: StoreLocal [v0,<<Minus2>>]
-## CHECK: name "<<BExit2Enter1>>"
-## CHECK: predecessors "<<BTry2>>"
+## CHECK: name "<<BEnter1>>"
+## CHECK: predecessors "<<BExit2>>"
## CHECK: successors "<<BTry1>>"
-## CHECK: xhandlers "<<BCatch1>>" "<<BCatch2>>"
-## CHECK: TryBoundary is_entry:true is_exit:true
+## CHECK: xhandlers "<<BCatch1>>"
+## CHECK: TryBoundary kind:entry
## CHECK: name "<<BExit1>>"
## CHECK: predecessors "<<BTry1>>"
## CHECK: successors "<<BReturn>>"
## CHECK: xhandlers "<<BCatch1>>"
-## CHECK: TryBoundary is_entry:false is_exit:true
+## CHECK: TryBoundary kind:exit
## CHECK: name "<<BEnter2>>"
## CHECK: predecessors "<<BGoto>>"
## CHECK: successors "<<BTry2>>"
## CHECK: xhandlers "<<BCatch2>>"
-## CHECK: TryBoundary is_entry:true is_exit:false
+## CHECK: TryBoundary kind:entry
+
+## CHECK: name "<<BExit2>>"
+## CHECK: predecessors "<<BTry2>>"
+## CHECK: successors "<<BEnter1>>"
+## CHECK: xhandlers "<<BCatch2>>"
+## CHECK: TryBoundary kind:exit
.method public static testSharedBoundary_Reverse(III)I
.registers 3
@@ -448,16 +460,16 @@
## CHECK: name "<<BTry1:B\d+>>"
## CHECK: predecessors "<<BEnter1:B\d+>>"
-## CHECK: successors "<<BExit1Enter2:B\d+>>"
+## CHECK: successors "<<BExit1:B\d+>>"
## CHECK: Div
## CHECK: name "<<BTry2:B\d+>>"
-## CHECK: predecessors "<<BExit1Enter2>>"
-## CHECK: successors "<<BExit2Enter3:B\d+>>"
+## CHECK: predecessors "<<BEnter2:B\d+>>"
+## CHECK: successors "<<BExit2:B\d+>>"
## CHECK: Div
## CHECK: name "<<BTry3:B\d+>>"
-## CHECK: predecessors "<<BExit2Enter3>>"
+## CHECK: predecessors "<<BEnter3:B\d+>>"
## CHECK: successors "<<BExit3:B\d+>>"
## CHECK: Div
@@ -465,13 +477,13 @@
## CHECK: predecessors "<<BExit3>>" "<<BCatchArith:B\d+>>" "<<BCatchAll:B\d+>>"
## CHECK: name "<<BCatchArith>>"
-## CHECK: predecessors "<<BExit1Enter2>>" "<<BExit2Enter3>>"
+## CHECK: predecessors "<<BEnter2>>" "<<BExit2>>"
## CHECK: successors "<<BReturn>>"
## CHECK: flags "catch_block"
## CHECK: StoreLocal [v0,<<Minus1>>]
## CHECK: name "<<BCatchAll>>"
-## CHECK: predecessors "<<BEnter1>>" "<<BExit1Enter2>>" "<<BExit2Enter3>>" "<<BExit3>>"
+## CHECK: predecessors "<<BEnter1>>" "<<BExit1>>" "<<BEnter2>>" "<<BExit2>>" "<<BEnter3>>" "<<BExit3>>"
## CHECK: successors "<<BReturn>>"
## CHECK: flags "catch_block"
## CHECK: StoreLocal [v0,<<Minus2>>]
@@ -480,25 +492,37 @@
## CHECK: predecessors "B0"
## CHECK: successors "<<BTry1>>"
## CHECK: xhandlers "<<BCatchAll>>"
-## CHECK: TryBoundary is_entry:true is_exit:false
+## CHECK: TryBoundary kind:entry
-## CHECK: name "<<BExit1Enter2>>"
+## CHECK: name "<<BExit1>>"
## CHECK: predecessors "<<BTry1>>"
-## CHECK: successors "<<BTry2>>"
-## CHECK: xhandlers "<<BCatchAll>>" "<<BCatchArith>>"
-## CHECK: TryBoundary is_entry:true is_exit:true
+## CHECK: successors "<<BEnter2>>"
+## CHECK: xhandlers "<<BCatchAll>>"
+## CHECK: TryBoundary kind:exit
-## CHECK: name "<<BExit2Enter3>>"
-## CHECK: predecessors "<<BTry2>>"
-## CHECK: successors "<<BTry3>>"
+## CHECK: name "<<BEnter2>>"
+## CHECK: predecessors "<<BExit1>>"
+## CHECK: successors "<<BTry2>>"
## CHECK: xhandlers "<<BCatchArith>>" "<<BCatchAll>>"
-## CHECK: TryBoundary is_entry:true is_exit:true
+## CHECK: TryBoundary kind:entry
+
+## CHECK: name "<<BExit2>>"
+## CHECK: predecessors "<<BTry2>>"
+## CHECK: successors "<<BEnter3>>"
+## CHECK: xhandlers "<<BCatchArith>>" "<<BCatchAll>>"
+## CHECK: TryBoundary kind:exit
+
+## CHECK: name "<<BEnter3>>"
+## CHECK: predecessors "<<BExit2>>"
+## CHECK: successors "<<BTry3>>"
+## CHECK: xhandlers "<<BCatchAll>>"
+## CHECK: TryBoundary kind:entry
## CHECK: name "<<BExit3>>"
## CHECK: predecessors "<<BTry3>>"
## CHECK: successors "<<BReturn>>"
## CHECK: xhandlers "<<BCatchAll>>"
-## CHECK: TryBoundary is_entry:false is_exit:true
+## CHECK: TryBoundary kind:exit
.method public static testNestedTry(IIII)I
.registers 4
@@ -562,25 +586,25 @@
## CHECK: predecessors "B0"
## CHECK: successors "<<BTry1>>"
## CHECK: xhandlers "<<BCatch>>"
-## CHECK: TryBoundary is_entry:true is_exit:false
+## CHECK: TryBoundary kind:entry
## CHECK: name "<<BExitTry1>>"
## CHECK: predecessors "<<BTry1>>"
## CHECK: successors "<<BOutside>>"
## CHECK: xhandlers "<<BCatch>>"
-## CHECK: TryBoundary is_entry:false is_exit:true
+## CHECK: TryBoundary kind:exit
## CHECK: name "<<BEnterTry2>>"
## CHECK: predecessors "<<BOutside>>"
## CHECK: successors "<<BTry2>>"
## CHECK: xhandlers "<<BCatch>>"
-## CHECK: TryBoundary is_entry:true is_exit:false
+## CHECK: TryBoundary kind:entry
## CHECK: name "<<BExitTry2>>"
## CHECK: predecessors "<<BTry2>>"
## CHECK: successors "<<BReturn>>"
## CHECK: xhandlers "<<BCatch>>"
-## CHECK: TryBoundary is_entry:false is_exit:true
+## CHECK: TryBoundary kind:exit
.method public static testIncontinuousTry(IIII)I
.registers 4
@@ -630,13 +654,13 @@
## CHECK: predecessors "B0"
## CHECK: successors "<<BTry>>"
## CHECK: xhandlers "<<BCatch>>"
-## CHECK: TryBoundary is_entry:true is_exit:false
+## CHECK: TryBoundary kind:entry
## CHECK: name "<<BExitTry>>"
## CHECK: predecessors "<<BTry>>"
## CHECK: successors "<<BExit>>"
## CHECK: xhandlers "<<BCatch>>"
-## CHECK: TryBoundary is_entry:false is_exit:true
+## CHECK: TryBoundary kind:exit
## CHECK: name "<<BExit>>"
## CHECK: predecessors "<<BExitTry>>" "<<BCatch>>"
@@ -660,28 +684,32 @@
## CHECK-START: int Builder.testCatchLoop(int, int, int) builder (after)
## CHECK: name "B0"
-## CHECK: successors "<<BEnterTry:B\d+>>"
+## CHECK: successors "<<BCatch:B\d+>>"
-## CHECK: name "<<BTry:B\d+>>"
-## CHECK: predecessors "<<BEnterTry>>" "<<BEnterTry>>" "<<BExitTry:B\d+>>"
-## CHECK: successors "<<BExitTry>>"
+## CHECK: name "<<BCatch>>"
+## CHECK: predecessors "B0" "<<BEnterTry:B\d+>>" "<<BExitTry:B\d+>>"
+## CHECK: successors "<<BEnterTry>>"
## CHECK: flags "catch_block"
-## CHECK: Div
## CHECK: name "<<BReturn:B\d+>>"
## CHECK: predecessors "<<BExitTry>>"
+## CHECK: name "<<BTry:B\d+>>"
+## CHECK: predecessors "<<BEnterTry>>"
+## CHECK: successors "<<BExitTry>>"
+## CHECK: Div
+
## CHECK: name "<<BEnterTry>>"
-## CHECK: predecessors "B0"
+## CHECK: predecessors "<<BCatch>>"
## CHECK: successors "<<BTry>>"
-## CHECK: xhandlers "<<BTry>>"
-## CHECK: TryBoundary is_entry:true is_exit:false
+## CHECK: xhandlers "<<BCatch>>"
+## CHECK: TryBoundary kind:entry
## CHECK: name "<<BExitTry>>"
## CHECK: predecessors "<<BTry>>"
## CHECK: successors "<<BReturn>>"
-## CHECK: xhandlers "<<BTry>>"
-## CHECK: TryBoundary is_entry:false is_exit:true
+## CHECK: xhandlers "<<BCatch>>"
+## CHECK: TryBoundary kind:exit
.method public static testCatchLoop(III)I
.registers 4
@@ -702,33 +730,49 @@
## CHECK-START: int Builder.testHandlerEdge1(int, int, int) builder (after)
## CHECK: name "B0"
-## CHECK: successors "<<BEnterTry:B\d+>>"
+## CHECK: successors "<<BEnterTry1:B\d+>>"
-## CHECK: name "<<BTry:B\d+>>"
-## CHECK: predecessors "<<BEnterTry>>"
-## CHECK: successors "<<BCatch:B\d+>>"
+## CHECK: name "<<BTry1:B\d+>>"
+## CHECK: predecessors "<<BEnterTry1>>"
+## CHECK: successors "<<BExitTry1:B\d+>>"
## CHECK: Div
-## CHECK: name "<<BCatch>>"
-## CHECK: predecessors "<<BTry>>" "<<BEnterTry>>" "<<BExitTry:B\d+>>"
-## CHECK: successors "<<BExitTry>>"
+## CHECK: name "<<BCatch:B\d+>>"
+## CHECK: predecessors "<<BExitTry1>>" "<<BEnterTry1>>" "<<BExitTry1>>" "<<BEnterTry2:B\d+>>" "<<BExitTry2:B\d+>>"
+## CHECK: successors "<<BEnterTry2>>"
## CHECK: flags "catch_block"
-## CHECK: Div
## CHECK: name "<<BReturn:B\d+>>"
-## CHECK: predecessors "<<BExitTry>>"
+## CHECK: predecessors "<<BExitTry2>>"
-## CHECK: name "<<BEnterTry>>"
+## CHECK: name "<<BEnterTry1>>"
## CHECK: predecessors "B0"
-## CHECK: successors "<<BTry>>"
+## CHECK: successors "<<BTry1>>"
## CHECK: xhandlers "<<BCatch>>"
-## CHECK: TryBoundary is_entry:true is_exit:false
+## CHECK: TryBoundary kind:entry
-## CHECK: name "<<BExitTry>>"
+## CHECK: name "<<BExitTry1>>"
+## CHECK: predecessors "<<BTry1>>"
+## CHECK: successors "<<BCatch>>"
+## CHECK: xhandlers "<<BCatch>>"
+## CHECK: TryBoundary kind:exit
+
+## CHECK: name "<<BTry2:B\d+>>"
+## CHECK: predecessors "<<BEnterTry2>>"
+## CHECK: successors "<<BExitTry2>>"
+## CHECK: Div
+
+## CHECK: name "<<BEnterTry2>>"
## CHECK: predecessors "<<BCatch>>"
+## CHECK: successors "<<BTry2>>"
+## CHECK: xhandlers "<<BCatch>>"
+## CHECK: TryBoundary kind:entry
+
+## CHECK: name "<<BExitTry2>>"
+## CHECK: predecessors "<<BTry2>>"
## CHECK: successors "<<BReturn>>"
## CHECK: xhandlers "<<BCatch>>"
-## CHECK: TryBoundary is_entry:false is_exit:true
+## CHECK: TryBoundary kind:exit
.method public static testHandlerEdge1(III)I
.registers 4
@@ -750,41 +794,55 @@
## CHECK-START: int Builder.testHandlerEdge2(int, int, int) builder (after)
## CHECK: name "B0"
+## CHECK: successors "<<BCatch1:B\d+>>"
+
+## CHECK: name "<<BCatch1>>"
+## CHECK: predecessors "B0" "<<BEnter2:B\d+>>" "<<BExit2:B\d+>>"
## CHECK: successors "<<BEnter1:B\d+>>"
-
-## CHECK: name "<<BTry1:B\d+>>"
-## CHECK: predecessors "<<BEnter1>>" "<<BExit1Enter2:B\d+>>" "<<BExit2:B\d+>>"
-## CHECK: successors "<<BExit1Enter2>>"
## CHECK: flags "catch_block"
-## CHECK: Div
-## CHECK: name "<<BTry2:B\d+>>"
-## CHECK: predecessors "<<BExit1Enter2>>" "<<BEnter1>>" "<<BExit1Enter2>>"
-## CHECK: successors "<<BExit2>>"
+## CHECK: name "<<BCatch2:B\d+>>"
+## CHECK: predecessors "<<BExit1:B\d+>>" "<<BEnter1>>" "<<BExit1>>"
+## CHECK: successors "<<BEnter2>>"
## CHECK: flags "catch_block"
-## CHECK: Div
## CHECK: name "<<BReturn:B\d+>>"
## CHECK: predecessors "<<BExit2>>"
## CHECK: Return
-## CHECK: name "<<BEnter1>>"
-## CHECK: predecessors "B0"
-## CHECK: successors "<<BTry1>>"
-## CHECK: xhandlers "<<BTry2>>"
-## CHECK: TryBoundary is_entry:true is_exit:false
+## CHECK: name "<<BTry1:B\d+>>"
+## CHECK: predecessors "<<BEnter1>>"
+## CHECK: successors "<<BExit1>>"
+## CHECK: Div
-## CHECK: name "<<BExit1Enter2>>"
+## CHECK: name "<<BEnter1>>"
+## CHECK: predecessors "<<BCatch1>>"
+## CHECK: successors "<<BTry1>>"
+## CHECK: xhandlers "<<BCatch2>>"
+## CHECK: TryBoundary kind:entry
+
+## CHECK: name "<<BExit1>>"
## CHECK: predecessors "<<BTry1>>"
+## CHECK: successors "<<BCatch2>>"
+## CHECK: xhandlers "<<BCatch2>>"
+## CHECK: TryBoundary kind:exit
+
+## CHECK: name "<<BTry2:B\d+>>"
+## CHECK: predecessors "<<BEnter2>>"
+## CHECK: successors "<<BExit2>>"
+## CHECK: Div
+
+## CHECK: name "<<BEnter2>>"
+## CHECK: predecessors "<<BCatch2>>"
## CHECK: successors "<<BTry2>>"
-## CHECK: xhandlers "<<BTry2>>" "<<BTry1>>"
-## CHECK: TryBoundary is_entry:true is_exit:true
+## CHECK: xhandlers "<<BCatch1>>"
+## CHECK: TryBoundary kind:entry
## CHECK: name "<<BExit2>>"
## CHECK: predecessors "<<BTry2>>"
## CHECK: successors "<<BReturn>>"
-## CHECK: xhandlers "<<BTry1>>"
-## CHECK: TryBoundary is_entry:false is_exit:true
+## CHECK: xhandlers "<<BCatch1>>"
+## CHECK: TryBoundary kind:exit
.method public static testHandlerEdge2(III)I
.registers 4
diff --git a/test/521-checker-array-set-null/expected.txt b/test/521-checker-array-set-null/expected.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test/521-checker-array-set-null/expected.txt
diff --git a/test/521-checker-array-set-null/info.txt b/test/521-checker-array-set-null/info.txt
new file mode 100644
index 0000000..ec2ba35
--- /dev/null
+++ b/test/521-checker-array-set-null/info.txt
@@ -0,0 +1,2 @@
+Checker test for optimizing that checks whether our
+optimizations to remove type checks on array set operations work.
diff --git a/test/521-checker-array-set-null/src/Main.java b/test/521-checker-array-set-null/src/Main.java
new file mode 100644
index 0000000..74bb73f
--- /dev/null
+++ b/test/521-checker-array-set-null/src/Main.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+public class Main {
+ public static void main(String[] args) {
+ testWithNull(new Object[2]);
+ testWithUnknown(new Object[2], new Object());
+ testWithSame(new Object[2]);
+ }
+
+ /// CHECK-START: void Main.testWithNull(java.lang.Object[]) disassembly (after)
+ /// CHECK-NOT: pAputObject
+ public static void testWithNull(Object[] o) {
+ o[0] = null;
+ }
+
+ /// CHECK-START: void Main.testWithUnknown(java.lang.Object[], java.lang.Object) disassembly (after)
+ /// CHECK: pAputObject
+ public static void testWithUnknown(Object[] o, Object obj) {
+ o[0] = obj;
+ }
+
+ /// CHECK-START: void Main.testWithSame(java.lang.Object[]) disassembly (after)
+ /// CHECK-NOT: pAputObject
+ public static void testWithSame(Object[] o) {
+ o[0] = o[1];
+ }
+}
diff --git a/test/521-regression-integer-field-set/expected.txt b/test/521-regression-integer-field-set/expected.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test/521-regression-integer-field-set/expected.txt
diff --git a/test/521-regression-integer-field-set/info.txt b/test/521-regression-integer-field-set/info.txt
new file mode 100644
index 0000000..62f7e3d
--- /dev/null
+++ b/test/521-regression-integer-field-set/info.txt
@@ -0,0 +1,3 @@
+Regression test for Optimizing's x86-64 code generator, where moving a
+32-bit immediate (integer or reference) into a field used to generate
+a `movw` instruction instead of a `movl` instruction.
diff --git a/test/521-regression-integer-field-set/src/Main.java b/test/521-regression-integer-field-set/src/Main.java
new file mode 100644
index 0000000..9924e09
--- /dev/null
+++ b/test/521-regression-integer-field-set/src/Main.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+class Main {
+ public static void assertIntEquals(int expected, int result) {
+ if (expected != result) {
+ throw new Error("Expected: " + expected + ", found: " + result);
+ }
+ }
+
+ public static void main(String[] args) {
+ Main m = new Main();
+
+ m.$noinline$SetInstanceField();
+ assertIntEquals(123456, m.i);
+
+ $noinline$SetStaticField();
+ assertIntEquals(456789, s);
+ }
+
+ private static boolean doThrow = false;
+
+ private void $noinline$SetInstanceField() {
+ if (doThrow) {
+ // Try defeating inlining.
+ throw new Error();
+ }
+
+ // Set a value that does not fit in a 16-bit (signed) integer.
+ i = 123456;
+ }
+
+ private static void $noinline$SetStaticField() {
+ if (doThrow) {
+ // Try defeating inlining.
+ throw new Error();
+ }
+
+ // Set a value that does not fit in a 16-bit (signed) integer.
+ s = 456789;
+ }
+
+ private int i = 0;
+ private static int s = 0;
+}
diff --git a/test/802-deoptimization/src/DeoptimizationController.java b/test/802-deoptimization/src/DeoptimizationController.java
index c926669..d6e662d 100644
--- a/test/802-deoptimization/src/DeoptimizationController.java
+++ b/test/802-deoptimization/src/DeoptimizationController.java
@@ -22,24 +22,27 @@
* Controls deoptimization using dalvik.system.VMDebug class.
*/
public class DeoptimizationController {
+ private static final String TEMP_FILE_NAME_PREFIX = "test";
+ private static final String TEMP_FILE_NAME_SUFFIX = ".trace";
+
private static File createTempFile() throws Exception {
try {
- return File.createTempFile("test", ".trace");
+ return File.createTempFile(TEMP_FILE_NAME_PREFIX, TEMP_FILE_NAME_SUFFIX);
} catch (IOException e) {
System.setProperty("java.io.tmpdir", "/data/local/tmp");
try {
- return File.createTempFile("test", ".trace");
+ return File.createTempFile(TEMP_FILE_NAME_PREFIX, TEMP_FILE_NAME_SUFFIX);
} catch (IOException e2) {
System.setProperty("java.io.tmpdir", "/sdcard");
- return File.createTempFile("test", ".trace");
+ return File.createTempFile(TEMP_FILE_NAME_PREFIX, TEMP_FILE_NAME_SUFFIX);
}
}
}
public static void startDeoptimization() {
+ File tempFile = null;
try {
- File tempFile = createTempFile();
- tempFile.deleteOnExit();
+ tempFile = createTempFile();
String tempFileName = tempFile.getPath();
VMDebug.startMethodTracing(tempFileName, 0, 0, false, 1000);
@@ -48,6 +51,10 @@
}
} catch (Exception exc) {
exc.printStackTrace(System.err);
+ } finally {
+ if (tempFile != null) {
+ tempFile.delete();
+ }
}
}
diff --git a/test/Android.run-test.mk b/test/Android.run-test.mk
index 5b5c368..3d97901 100644
--- a/test/Android.run-test.mk
+++ b/test/Android.run-test.mk
@@ -498,18 +498,45 @@
TEST_ART_BROKEN_READ_BARRIER_RUN_TESTS :=
-# Tests that should fail in the heap poisoning configuration.
-# 137: Heap poisoning forces interpreter. Cannot run this with the interpreter.
-TEST_ART_BROKEN_HEAP_POISONING_RUN_TESTS := \
+# Tests that should fail in the heap poisoning configuration with the default (Quick) compiler.
+# 137: Quick punts to the interpreter, and this test cannot run with the interpreter.
+TEST_ART_BROKEN_DEFAULT_HEAP_POISONING_RUN_TESTS := \
+ 137-cfi
+# Tests that should fail in the heap poisoning configuration with the Optimizing compiler.
+# 055-enum-performance: Exceeds run time limits due to heap poisoning instrumentation.
+TEST_ART_BROKEN_OPTIMIZING_HEAP_POISONING_RUN_TESTS := \
+ 055-enum-performance
+# Tests that should fail in the heap poisoning configuration with the interpreter.
+# 137: Cannot run this with the interpreter.
+TEST_ART_BROKEN_INTERPRETER_HEAP_POISONING_RUN_TESTS := \
137-cfi
ifeq ($(ART_HEAP_POISONING),true)
- ART_TEST_KNOWN_BROKEN += $(call all-run-test-names,$(TARGET_TYPES),$(RUN_TYPES),$(PREBUILD_TYPES), \
- $(COMPILER_TYPES),$(RELOCATE_TYPES),$(TRACE_TYPES),$(GC_TYPES),$(JNI_TYPES), \
- $(IMAGE_TYPES),$(PICTEST_TYPES),$(DEBUGGABLE_TYPES),$(TEST_ART_BROKEN_HEAP_POISONING_RUN_TESTS),$(ALL_ADDRESS_SIZES))
+ ifneq (,$(filter default,$(COMPILER_TYPES)))
+ ART_TEST_KNOWN_BROKEN += $(call all-run-test-names,$(TARGET_TYPES),$(RUN_TYPES), \
+ $(PREBUILD_TYPES),default,$(RELOCATE_TYPES),$(TRACE_TYPES),$(GC_TYPES),$(JNI_TYPES), \
+ $(IMAGE_TYPES),$(PICTEST_TYPES),$(DEBUGGABLE_TYPES), \
+ $(TEST_ART_BROKEN_DEFAULT_HEAP_POISONING_RUN_TESTS),$(ALL_ADDRESS_SIZES))
+ endif
+
+ ifneq (,$(filter optimizing,$(COMPILER_TYPES)))
+ ART_TEST_KNOWN_BROKEN += $(call all-run-test-names,$(TARGET_TYPES),$(RUN_TYPES), \
+ $(PREBUILD_TYPES),optimizing,$(RELOCATE_TYPES),$(TRACE_TYPES),$(GC_TYPES),$(JNI_TYPES), \
+ $(IMAGE_TYPES),$(PICTEST_TYPES),$(DEBUGGABLE_TYPES), \
+ $(TEST_ART_BROKEN_OPTIMIZING_HEAP_POISONING_RUN_TESTS),$(ALL_ADDRESS_SIZES))
+ endif
+
+ ifneq (,$(filter interpreter,$(COMPILER_TYPES)))
+ ART_TEST_KNOWN_BROKEN += $(call all-run-test-names,$(TARGET_TYPES),$(RUN_TYPES), \
+ $(PREBUILD_TYPES),interpreter,$(RELOCATE_TYPES),$(TRACE_TYPES),$(GC_TYPES),$(JNI_TYPES), \
+ $(IMAGE_TYPES),$(PICTEST_TYPES),$(DEBUGGABLE_TYPES), \
+ $(TEST_ART_BROKEN_INTERPRETER_HEAP_POISONING_RUN_TESTS),$(ALL_ADDRESS_SIZES))
+ endif
endif
-TEST_ART_BROKEN_HEAP_POISONING_RUN_TESTS :=
+TEST_ART_BROKEN_INTERPRETER_HEAP_POISONING_RUN_TESTS :=
+TEST_ART_BROKEN_OPTIMIZING_HEAP_POISONING_RUN_TESTS :=
+TEST_ART_BROKEN_DEFAULT_HEAP_POISONING_RUN_TESTS :=
# Clear variables ahead of appending to them when defining tests.
$(foreach target, $(TARGET_TYPES), $(eval ART_RUN_TEST_$(call name-to-var,$(target))_RULES :=))