Revert "Annotate used fields."
This reverts commit 7f6cf56942c8469958b273ea968db253051c5b05.

It drops the SFieldAnnotation/IFieldAnnotation accessors again and restores the
explicit ComputeStaticFieldInfo()/FastInstance() lookups in GenSput(),
GenSget(), GenIGet() and GenIPut(), which take the Dex field index directly
instead of a MIR*.

Change-Id: Ic389a194c3404ecb5bb563a405bf4a0d6336ea0d
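---
For orientation, the lookup pattern this change restores, abbreviated from the
diff below. This is a sketch, not compilable on its own: it runs inside
Mir2Lir, so cu_ and mir_graph_ are members of the surrounding class, and the
branch bodies are summarized as comments.

  int field_offset;
  int storage_index;
  bool is_volatile, is_referrers_class, is_initialized;
  // Resolve the static field at compile time; the out-parameters decide
  // between inline codegen and a runtime helper call.
  bool fast_path = cu_->compiler_driver->ComputeStaticFieldInfo(
      field_idx, mir_graph_->GetCurrentDexCompilationUnit(), true /* is_put */,
      &field_offset, &storage_index, &is_referrers_class, &is_volatile,
      &is_initialized);
  if (fast_path) {
    // Inline access at field_offset, with memory barriers when is_volatile.
  } else {
    // Slow path: call the pSet*/pGet* entrypoint keyed by the field index.
  }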
diff --git a/compiler/dex/quick/gen_common.cc b/compiler/dex/quick/gen_common.cc
index a0a83db..0533fbf 100644
--- a/compiler/dex/quick/gen_common.cc
+++ b/compiler/dex/quick/gen_common.cc
@@ -381,14 +381,20 @@
const int r_base_;
};
-void Mir2Lir::GenSput(MIR* mir, RegLocation rl_src, bool is_long_or_double,
+void Mir2Lir::GenSput(uint32_t field_idx, RegLocation rl_src, bool is_long_or_double,
bool is_object) {
- const SFieldAnnotation& annotation = mir_graph_->GetSFieldAnnotation(mir);
- cu_->compiler_driver->ProcessedStaticField(annotation.FastPut(), annotation.IsReferrersClass());
- if (annotation.FastPut() && !SLOW_FIELD_PATH) {
- DCHECK_GE(annotation.FieldOffset().Int32Value(), 0);
+ int field_offset;
+ int storage_index;
+ bool is_volatile;
+ bool is_referrers_class;
+ bool is_initialized;
+ bool fast_path = cu_->compiler_driver->ComputeStaticFieldInfo(
+ field_idx, mir_graph_->GetCurrentDexCompilationUnit(), true,
+ &field_offset, &storage_index, &is_referrers_class, &is_volatile, &is_initialized);
+ if (fast_path && !SLOW_FIELD_PATH) {
+ DCHECK_GE(field_offset, 0);
int r_base;
- if (annotation.IsReferrersClass()) {
+ if (is_referrers_class) {
// Fast path, static storage base is this method's class
RegLocation rl_method = LoadCurrMethod();
r_base = AllocTemp();
@@ -401,7 +407,7 @@
// Medium path, static storage base in a different class which requires checks that the other
// class is initialized.
// TODO: remove initialized check now that we are initializing classes in the compiler driver.
- DCHECK_NE(annotation.StorageIndex(), DexFile::kDexNoIndex);
+ DCHECK_GE(storage_index, 0);
// May do runtime call so everything to home locations.
FlushAllRegs();
// Using fixed register to sync with possible call to runtime support.
@@ -414,9 +420,9 @@
mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value(),
r_base);
LoadWordDisp(r_base, mirror::Array::DataOffset(sizeof(mirror::Object*)).Int32Value() +
- sizeof(int32_t*) * annotation.StorageIndex(), r_base);
+ sizeof(int32_t*) * storage_index, r_base);
// r_base now points at static storage (Class*) or NULL if the type is not yet resolved.
- if (!annotation.IsInitialized()) {
+ if (!is_initialized) {
// Check if r_base is NULL or a not yet initialized class.
// The slow path is invoked if the r_base is NULL or the class pointed
@@ -431,7 +437,7 @@
AddSlowPath(new (arena_) StaticFieldSlowPath(this,
unresolved_branch, uninit_branch, cont,
- annotation.StorageIndex(), r_base));
+ storage_index, r_base));
FreeTemp(r_tmp);
}
@@ -443,16 +449,16 @@
} else {
rl_src = LoadValue(rl_src, kAnyReg);
}
- if (annotation.IsVolatile()) {
+ if (is_volatile) {
GenMemBarrier(kStoreStore);
}
if (is_long_or_double) {
- StoreBaseDispWide(r_base, annotation.FieldOffset().Int32Value(), rl_src.low_reg,
+ StoreBaseDispWide(r_base, field_offset, rl_src.low_reg,
rl_src.high_reg);
} else {
- StoreWordDisp(r_base, annotation.FieldOffset().Int32Value(), rl_src.low_reg);
+ StoreWordDisp(r_base, field_offset, rl_src.low_reg);
}
- if (annotation.IsVolatile()) {
+ if (is_volatile) {
GenMemBarrier(kStoreLoad);
}
if (is_object && !mir_graph_->IsConstantNullRef(rl_src)) {
@@ -465,18 +471,24 @@
is_long_or_double ? QUICK_ENTRYPOINT_OFFSET(pSet64Static)
: (is_object ? QUICK_ENTRYPOINT_OFFSET(pSetObjStatic)
: QUICK_ENTRYPOINT_OFFSET(pSet32Static));
- CallRuntimeHelperImmRegLocation(setter_offset, annotation.FieldIndex(), rl_src, true);
+ CallRuntimeHelperImmRegLocation(setter_offset, field_idx, rl_src, true);
}
}
-void Mir2Lir::GenSget(MIR* mir, RegLocation rl_dest,
+void Mir2Lir::GenSget(uint32_t field_idx, RegLocation rl_dest,
bool is_long_or_double, bool is_object) {
- const SFieldAnnotation& annotation = mir_graph_->GetSFieldAnnotation(mir);
- cu_->compiler_driver->ProcessedStaticField(annotation.FastGet(), annotation.IsReferrersClass());
- if (annotation.FastGet() && !SLOW_FIELD_PATH) {
- DCHECK_GE(annotation.FieldOffset().Int32Value(), 0);
+ int field_offset;
+ int storage_index;
+ bool is_volatile;
+ bool is_referrers_class;
+ bool is_initialized;
+ bool fast_path = cu_->compiler_driver->ComputeStaticFieldInfo(
+ field_idx, mir_graph_->GetCurrentDexCompilationUnit(), false,
+ &field_offset, &storage_index, &is_referrers_class, &is_volatile, &is_initialized);
+ if (fast_path && !SLOW_FIELD_PATH) {
+ DCHECK_GE(field_offset, 0);
int r_base;
- if (annotation.IsReferrersClass()) {
+ if (is_referrers_class) {
// Fast path, static storage base is this method's class
RegLocation rl_method = LoadCurrMethod();
r_base = AllocTemp();
@@ -485,7 +497,7 @@
} else {
// Medium path, static storage base in a different class which requires checks that the other
// class is initialized
- DCHECK_NE(annotation.StorageIndex(), DexFile::kDexNoIndex);
+ DCHECK_GE(storage_index, 0);
// May do runtime call so everything to home locations.
FlushAllRegs();
// Using fixed register to sync with possible call to runtime support.
@@ -498,9 +510,9 @@
mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value(),
r_base);
LoadWordDisp(r_base, mirror::Array::DataOffset(sizeof(mirror::Object*)).Int32Value() +
- sizeof(int32_t*) * annotation.StorageIndex(), r_base);
+ sizeof(int32_t*) * storage_index, r_base);
// r_base now points at static storage (Class*) or NULL if the type is not yet resolved.
- if (!annotation.IsInitialized()) {
+ if (!is_initialized) {
// Check if r_base is NULL or a not yet initialized class.
// The slow path is invoked if the r_base is NULL or the class pointed
@@ -515,7 +527,7 @@
AddSlowPath(new (arena_) StaticFieldSlowPath(this,
unresolved_branch, uninit_branch, cont,
- annotation.StorageIndex(), r_base));
+ storage_index, r_base));
FreeTemp(r_tmp);
}
@@ -523,14 +535,14 @@
}
// r_base now holds static storage base
RegLocation rl_result = EvalLoc(rl_dest, kAnyReg, true);
- if (annotation.IsVolatile()) {
+ if (is_volatile) {
GenMemBarrier(kLoadLoad);
}
if (is_long_or_double) {
- LoadBaseDispWide(r_base, annotation.FieldOffset().Int32Value(), rl_result.low_reg,
+ LoadBaseDispWide(r_base, field_offset, rl_result.low_reg,
rl_result.high_reg, INVALID_SREG);
} else {
- LoadWordDisp(r_base, annotation.FieldOffset().Int32Value(), rl_result.low_reg);
+ LoadWordDisp(r_base, field_offset, rl_result.low_reg);
}
FreeTemp(r_base);
if (is_long_or_double) {
@@ -544,7 +556,7 @@
is_long_or_double ? QUICK_ENTRYPOINT_OFFSET(pGet64Static)
: (is_object ? QUICK_ENTRYPOINT_OFFSET(pGetObjStatic)
: QUICK_ENTRYPOINT_OFFSET(pGet32Static));
- CallRuntimeHelperImm(getterOffset, annotation.FieldIndex(), true);
+ CallRuntimeHelperImm(getterOffset, field_idx, true);
if (is_long_or_double) {
RegLocation rl_result = GetReturnWide(rl_dest.fp);
StoreValueWide(rl_dest, rl_result);
@@ -686,15 +698,18 @@
}
}
-void Mir2Lir::GenIGet(MIR* mir, int opt_flags, OpSize size,
+void Mir2Lir::GenIGet(uint32_t field_idx, int opt_flags, OpSize size,
RegLocation rl_dest, RegLocation rl_obj, bool is_long_or_double,
bool is_object) {
- const IFieldAnnotation& annotation = mir_graph_->GetIFieldAnnotation(mir);
- cu_->compiler_driver->ProcessedInstanceField(annotation.FastGet());
- if (annotation.FastGet() && !SLOW_FIELD_PATH) {
+ int field_offset;
+ bool is_volatile;
+
+ bool fast_path = FastInstance(field_idx, false, &field_offset, &is_volatile);
+
+ if (fast_path && !SLOW_FIELD_PATH) {
RegLocation rl_result;
RegisterClass reg_class = oat_reg_class_by_size(size);
- DCHECK_GE(annotation.FieldOffset().Int32Value(), 0);
+ DCHECK_GE(field_offset, 0);
rl_obj = LoadValue(rl_obj, kCoreReg);
if (is_long_or_double) {
DCHECK(rl_dest.wide);
@@ -702,17 +717,17 @@
if (cu_->instruction_set == kX86) {
rl_result = EvalLoc(rl_dest, reg_class, true);
GenNullCheck(rl_obj.s_reg_low, rl_obj.low_reg, opt_flags);
- LoadBaseDispWide(rl_obj.low_reg, annotation.FieldOffset().Int32Value(), rl_result.low_reg,
+ LoadBaseDispWide(rl_obj.low_reg, field_offset, rl_result.low_reg,
rl_result.high_reg, rl_obj.s_reg_low);
- if (annotation.IsVolatile()) {
+ if (is_volatile) {
GenMemBarrier(kLoadLoad);
}
} else {
int reg_ptr = AllocTemp();
- OpRegRegImm(kOpAdd, reg_ptr, rl_obj.low_reg, annotation.FieldOffset().Int32Value());
+ OpRegRegImm(kOpAdd, reg_ptr, rl_obj.low_reg, field_offset);
rl_result = EvalLoc(rl_dest, reg_class, true);
LoadBaseDispWide(reg_ptr, 0, rl_result.low_reg, rl_result.high_reg, INVALID_SREG);
- if (annotation.IsVolatile()) {
+ if (is_volatile) {
GenMemBarrier(kLoadLoad);
}
FreeTemp(reg_ptr);
@@ -721,9 +736,9 @@
} else {
rl_result = EvalLoc(rl_dest, reg_class, true);
GenNullCheck(rl_obj.s_reg_low, rl_obj.low_reg, opt_flags);
- LoadBaseDisp(rl_obj.low_reg, annotation.FieldOffset().Int32Value(), rl_result.low_reg,
+ LoadBaseDisp(rl_obj.low_reg, field_offset, rl_result.low_reg,
kWord, rl_obj.s_reg_low);
- if (annotation.IsVolatile()) {
+ if (is_volatile) {
GenMemBarrier(kLoadLoad);
}
StoreValue(rl_dest, rl_result);
@@ -733,7 +748,7 @@
is_long_or_double ? QUICK_ENTRYPOINT_OFFSET(pGet64Instance)
: (is_object ? QUICK_ENTRYPOINT_OFFSET(pGetObjInstance)
: QUICK_ENTRYPOINT_OFFSET(pGet32Instance));
- CallRuntimeHelperImmRegLocation(getterOffset, annotation.FieldIndex(), rl_obj, true);
+ CallRuntimeHelperImmRegLocation(getterOffset, field_idx, rl_obj, true);
if (is_long_or_double) {
RegLocation rl_result = GetReturnWide(rl_dest.fp);
StoreValueWide(rl_dest, rl_result);
@@ -744,37 +759,39 @@
}
}
-void Mir2Lir::GenIPut(MIR* mir, int opt_flags, OpSize size,
+void Mir2Lir::GenIPut(uint32_t field_idx, int opt_flags, OpSize size,
RegLocation rl_src, RegLocation rl_obj, bool is_long_or_double,
bool is_object) {
- const IFieldAnnotation& annotation = mir_graph_->GetIFieldAnnotation(mir);
- cu_->compiler_driver->ProcessedInstanceField(annotation.FastPut());
- if (annotation.FastPut() && !SLOW_FIELD_PATH) {
+ int field_offset;
+ bool is_volatile;
+
+ bool fast_path = FastInstance(field_idx, true, &field_offset, &is_volatile);
+ if (fast_path && !SLOW_FIELD_PATH) {
RegisterClass reg_class = oat_reg_class_by_size(size);
- DCHECK_GE(annotation.FieldOffset().Int32Value(), 0);
+ DCHECK_GE(field_offset, 0);
rl_obj = LoadValue(rl_obj, kCoreReg);
if (is_long_or_double) {
int reg_ptr;
rl_src = LoadValueWide(rl_src, kAnyReg);
GenNullCheck(rl_obj.s_reg_low, rl_obj.low_reg, opt_flags);
reg_ptr = AllocTemp();
- OpRegRegImm(kOpAdd, reg_ptr, rl_obj.low_reg, annotation.FieldOffset().Int32Value());
- if (annotation.IsVolatile()) {
+ OpRegRegImm(kOpAdd, reg_ptr, rl_obj.low_reg, field_offset);
+ if (is_volatile) {
GenMemBarrier(kStoreStore);
}
StoreBaseDispWide(reg_ptr, 0, rl_src.low_reg, rl_src.high_reg);
- if (annotation.IsVolatile()) {
+ if (is_volatile) {
GenMemBarrier(kLoadLoad);
}
FreeTemp(reg_ptr);
} else {
rl_src = LoadValue(rl_src, reg_class);
GenNullCheck(rl_obj.s_reg_low, rl_obj.low_reg, opt_flags);
- if (annotation.IsVolatile()) {
+ if (is_volatile) {
GenMemBarrier(kStoreStore);
}
- StoreBaseDisp(rl_obj.low_reg, annotation.FieldOffset().Int32Value(), rl_src.low_reg, kWord);
- if (annotation.IsVolatile()) {
+ StoreBaseDisp(rl_obj.low_reg, field_offset, rl_src.low_reg, kWord);
+ if (is_volatile) {
GenMemBarrier(kLoadLoad);
}
if (is_object && !mir_graph_->IsConstantNullRef(rl_src)) {
@@ -786,8 +803,7 @@
is_long_or_double ? QUICK_ENTRYPOINT_OFFSET(pSet64Instance)
: (is_object ? QUICK_ENTRYPOINT_OFFSET(pSetObjInstance)
: QUICK_ENTRYPOINT_OFFSET(pSet32Instance));
- CallRuntimeHelperImmRegLocationRegLocation(setter_offset, annotation.FieldIndex(),
- rl_obj, rl_src, true);
+ CallRuntimeHelperImmRegLocationRegLocation(setter_offset, field_idx, rl_obj, rl_src, true);
}
}
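---
Call sites (not part of this file's hunks) would pass the decoded field index
rather than the MIR*. A hedged sketch of the dispatcher side, assuming the
usual vB/vC decode in Mir2Lir::CompileDalvikInstruction; the exact case labels
shown are illustrative:

  // Hypothetical dispatcher cases: static field ops carry the field index in
  // vB, instance field ops in vC, forwarded straight into the Gen* helpers.
  case Instruction::SGET:
    GenSget(mir->dalvikInsn.vB, rl_dest, false /* wide */, false /* object */);
    break;
  case Instruction::IPUT_OBJECT:
    GenIPut(mir->dalvikInsn.vC, opt_flags, kWord, rl_src[0], rl_src[1],
            false /* wide */, true /* object */);
    break;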