Support unresolved fields in optimizing
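
The builder no longer bails out of compilation when a field cannot be
resolved. Instead it creates HUnresolvedInstanceFieldGet/Set and
HUnresolvedStaticFieldGet/Set instructions, which the code generators lower
to calls into the quick field access entrypoints through a per-backend field
access calling convention.
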
Change-Id: I9941fa5fcb6ef0a7a253c7a0b479a44a0210aad4
diff --git a/compiler/optimizing/builder.cc b/compiler/optimizing/builder.cc
index 3663448..6831e9b 100644
--- a/compiler/optimizing/builder.cc
+++ b/compiler/optimizing/builder.cc
@@ -1225,6 +1225,12 @@
}
}
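+// Returns the primitive type of the field referenced by `field_index`,
+// derived from its type descriptor in the dex file.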
+static Primitive::Type GetFieldAccessType(const DexFile& dex_file, uint16_t field_index) {
+ const DexFile::FieldId& field_id = dex_file.GetFieldId(field_index);
+ const char* type = dex_file.GetFieldTypeDescriptor(field_id);
+ return Primitive::GetType(type[0]);
+}
+
bool HGraphBuilder::BuildInstanceFieldAccess(const Instruction& instruction,
uint32_t dex_pc,
bool is_put) {
@@ -1244,44 +1250,61 @@
ArtField* resolved_field =
compiler_driver_->ComputeInstanceFieldInfo(field_index, dex_compilation_unit_, is_put, soa);
- if (resolved_field == nullptr) {
- MaybeRecordStat(MethodCompilationStat::kNotCompiledUnresolvedField);
- return false;
- }
-
- Primitive::Type field_type = resolved_field->GetTypeAsPrimitiveType();
HInstruction* object = LoadLocal(obj_reg, Primitive::kPrimNot, dex_pc);
- current_block_->AddInstruction(new (arena_) HNullCheck(object, dex_pc));
+ HInstruction* null_check = new (arena_) HNullCheck(object, dex_pc);
+ current_block_->AddInstruction(null_check);
+
+ Primitive::Type field_type = (resolved_field == nullptr)
+ ? GetFieldAccessType(*dex_file_, field_index)
+ : resolved_field->GetTypeAsPrimitiveType();
if (is_put) {
Temporaries temps(graph_);
- HInstruction* null_check = current_block_->GetLastInstruction();
// We need one temporary for the null check.
temps.Add(null_check);
HInstruction* value = LoadLocal(source_or_dest_reg, field_type, dex_pc);
- current_block_->AddInstruction(new (arena_) HInstanceFieldSet(
- null_check,
- value,
- field_type,
- resolved_field->GetOffset(),
- resolved_field->IsVolatile(),
- field_index,
- *dex_file_,
- dex_compilation_unit_->GetDexCache(),
- dex_pc));
+ HInstruction* field_set = nullptr;
+ if (resolved_field == nullptr) {
+ MaybeRecordStat(MethodCompilationStat::kUnresolvedField);
+ field_set = new (arena_) HUnresolvedInstanceFieldSet(null_check,
+ value,
+ field_type,
+ field_index,
+ dex_pc);
+ } else {
+ field_set = new (arena_) HInstanceFieldSet(null_check,
+ value,
+ field_type,
+ resolved_field->GetOffset(),
+ resolved_field->IsVolatile(),
+ field_index,
+ *dex_file_,
+ dex_compilation_unit_->GetDexCache(),
+ dex_pc);
+ }
+ current_block_->AddInstruction(field_set);
} else {
- current_block_->AddInstruction(new (arena_) HInstanceFieldGet(
- current_block_->GetLastInstruction(),
- field_type,
- resolved_field->GetOffset(),
- resolved_field->IsVolatile(),
- field_index,
- *dex_file_,
- dex_compilation_unit_->GetDexCache(),
- dex_pc));
-
- UpdateLocal(source_or_dest_reg, current_block_->GetLastInstruction(), dex_pc);
+ HInstruction* field_get = nullptr;
+ if (resolved_field == nullptr) {
+ MaybeRecordStat(MethodCompilationStat::kUnresolvedField);
+ field_get = new (arena_) HUnresolvedInstanceFieldGet(null_check,
+ field_type,
+ field_index,
+ dex_pc);
+ } else {
+ field_get = new (arena_) HInstanceFieldGet(null_check,
+ field_type,
+ resolved_field->GetOffset(),
+ resolved_field->IsVolatile(),
+ field_index,
+ *dex_file_,
+ dex_compilation_unit_->GetDexCache(),
+ dex_pc);
+ }
+ current_block_->AddInstruction(field_get);
+ UpdateLocal(source_or_dest_reg, field_get, dex_pc);
}
+
return true;
}
@@ -1338,8 +1361,18 @@
soa, dex_cache, class_loader, dex_compilation_unit_, field_index, true);
if (resolved_field == nullptr) {
- MaybeRecordStat(MethodCompilationStat::kNotCompiledUnresolvedField);
- return false;
+ MaybeRecordStat(MethodCompilationStat::kUnresolvedField);
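+ // The access will be performed at runtime through the field access
+ // entrypoints, so only the field index and its declared type are needed.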
+ Primitive::Type field_type = GetFieldAccessType(*dex_file_, field_index);
+ if (is_put) {
+ HInstruction* value = LoadLocal(source_or_dest_reg, field_type, dex_pc);
+ current_block_->AddInstruction(
+ new (arena_) HUnresolvedStaticFieldSet(value, field_type, field_index, dex_pc));
+ } else {
+ current_block_->AddInstruction(
+ new (arena_) HUnresolvedStaticFieldGet(field_type, field_index, dex_pc));
+ UpdateLocal(source_or_dest_reg, current_block_->GetLastInstruction(), dex_pc);
+ }
+ return true;
}
const DexFile& outer_dex_file = *outer_compilation_unit_->GetDexFile();
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index 3c6a41d..1b66121 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -413,6 +413,120 @@
InvokeRuntime(entrypoint, invoke, invoke->GetDexPc(), nullptr);
}
+void CodeGenerator::CreateUnresolvedFieldLocationSummary(
+ HInstruction* field_access,
+ Primitive::Type field_type,
+ const FieldAccessCallingConvetion& calling_convention) {
+ bool is_instance = field_access->IsUnresolvedInstanceFieldGet()
+ || field_access->IsUnresolvedInstanceFieldSet();
+ bool is_get = field_access->IsUnresolvedInstanceFieldGet()
+ || field_access->IsUnresolvedStaticFieldGet();
+
+ ArenaAllocator* allocator = field_access->GetBlock()->GetGraph()->GetArena();
+ LocationSummary* locations =
+ new (allocator) LocationSummary(field_access, LocationSummary::kCall);
+
+ locations->AddTemp(calling_convention.GetFieldIndexLocation());
+
+ if (is_instance) {
+ // Add the `this` object for instance field accesses.
+ locations->SetInAt(0, calling_convention.GetObjectLocation());
+ }
+
+ // Note that pSetXXStatic/pGetXXStatic always takes/returns an int or int64
+ // regardless of the type. Because of that we are forced to special case
+ // the access to floating point values.
+ if (is_get) {
+ if (Primitive::IsFloatingPointType(field_type)) {
+ // The return value will be stored in regular registers while the register
+ // allocator expects it in a floating point register. Allocate a temp for
+ // it and make the transfer at codegen.
+ AddLocationAsTemp(calling_convention.GetReturnLocation(field_type), locations);
+ locations->SetOut(calling_convention.GetFpuLocation(field_type));
+ } else {
+ locations->SetOut(calling_convention.GetReturnLocation(field_type));
+ }
+ } else {
+ size_t set_index = is_instance ? 1 : 0;
+ if (Primitive::IsFloatingPointType(field_type)) {
+ // The set value comes from a float location while the calling convention
+ // expects it in a regular register location. Allocate a temp for it and
+ // make the transfer at codegen.
+ AddLocationAsTemp(calling_convention.GetSetValueLocation(field_type, is_instance), locations);
+ locations->SetInAt(set_index, calling_convention.GetFpuLocation(field_type));
+ } else {
+ locations->SetInAt(set_index,
+ calling_convention.GetSetValueLocation(field_type, is_instance));
+ }
+ }
+}
+
+void CodeGenerator::GenerateUnresolvedFieldAccess(HInstruction* field_access,
+ Primitive::Type field_type,
+ uint32_t field_index,
+ uint32_t dex_pc) {
+ LocationSummary* locations = field_access->GetLocations();
+
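+ // Temp 0 holds the field index argument expected by the entrypoints
+ // (see GetFieldIndexLocation() in the field access calling convention).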
+ MoveConstant(locations->GetTemp(0), field_index);
+
+ bool is_instance = field_access->IsUnresolvedInstanceFieldGet()
+ || field_access->IsUnresolvedInstanceFieldSet();
+ bool is_get = field_access->IsUnresolvedInstanceFieldGet()
+ || field_access->IsUnresolvedStaticFieldGet();
+
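+ // The entrypoints expect the set value in core registers; for floating
+ // point values move it from its FP location into the reserved core temp(s).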
+ if (!is_get && Primitive::IsFloatingPointType(field_type)) {
+ MoveLocationToTemp(locations->InAt(is_instance ? 1 : 0), *locations, 1, field_type);
+ }
+
+ QuickEntrypointEnum entrypoint = kQuickSet8Static; // Initialize to anything to avoid warnings.
+ switch (field_type) {
+ case Primitive::kPrimBoolean:
+ entrypoint = is_instance
+ ? (is_get ? kQuickGetBooleanInstance : kQuickSet8Instance)
+ : (is_get ? kQuickGetBooleanStatic : kQuickSet8Static);
+ break;
+ case Primitive::kPrimByte:
+ entrypoint = is_instance
+ ? (is_get ? kQuickGetByteInstance : kQuickSet8Instance)
+ : (is_get ? kQuickGetByteStatic : kQuickSet8Static);
+ break;
+ case Primitive::kPrimShort:
+ entrypoint = is_instance
+ ? (is_get ? kQuickGetShortInstance : kQuickSet16Instance)
+ : (is_get ? kQuickGetShortStatic : kQuickSet16Static);
+ break;
+ case Primitive::kPrimChar:
+ entrypoint = is_instance
+ ? (is_get ? kQuickGetCharInstance : kQuickSet16Instance)
+ : (is_get ? kQuickGetCharStatic : kQuickSet16Static);
+ break;
+ case Primitive::kPrimInt:
+ case Primitive::kPrimFloat:
+ entrypoint = is_instance
+ ? (is_get ? kQuickGet32Instance : kQuickSet32Instance)
+ : (is_get ? kQuickGet32Static : kQuickSet32Static);
+ break;
+ case Primitive::kPrimNot:
+ entrypoint = is_instance
+ ? (is_get ? kQuickGetObjInstance : kQuickSetObjInstance)
+ : (is_get ? kQuickGetObjStatic : kQuickSetObjStatic);
+ break;
+ case Primitive::kPrimLong:
+ case Primitive::kPrimDouble:
+ entrypoint = is_instance
+ ? (is_get ? kQuickGet64Instance : kQuickSet64Instance)
+ : (is_get ? kQuickGet64Static : kQuickSet64Static);
+ break;
+ default:
+ LOG(FATAL) << "Invalid type " << field_type;
+ }
+ InvokeRuntime(entrypoint, field_access, dex_pc, nullptr);
+
+ if (is_get && Primitive::IsFloatingPointType(field_type)) {
+ MoveTempToLocation(*locations, 1, locations->Out(), field_type);
+ }
+}
+
void CodeGenerator::BlockIfInRegister(Location location, bool is_out) const {
// The DCHECKS below check that a register is not specified twice in
// the summary. The out location can overlap with an input, so we need
diff --git a/compiler/optimizing/code_generator.h b/compiler/optimizing/code_generator.h
index a54dbf1..11478a4 100644
--- a/compiler/optimizing/code_generator.h
+++ b/compiler/optimizing/code_generator.h
@@ -136,6 +136,22 @@
DISALLOW_COPY_AND_ASSIGN(InvokeDexCallingConventionVisitor);
};
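+// Describes where the field access entrypoints expect the field index, the
+// object and the set value, and where they return their result. Each backend
+// provides an implementation matching its runtime calling convention.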
+class FieldAccessCallingConvetion {
+ public:
+ virtual Location GetObjectLocation() const = 0;
+ virtual Location GetFieldIndexLocation() const = 0;
+ virtual Location GetReturnLocation(Primitive::Type type) const = 0;
+ virtual Location GetSetValueLocation(Primitive::Type type, bool is_instance) const = 0;
+ virtual Location GetFpuLocation(Primitive::Type type) const = 0;
+ virtual ~FieldAccessCallingConvetion() {}
+
+ protected:
+ FieldAccessCallingConvetion() {}
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(FieldAccessCallingConvetion);
+};
+
class CodeGenerator {
public:
// Compiles the graph to executable instructions. Returns whether the compilation
@@ -170,6 +186,16 @@
virtual void Bind(HBasicBlock* block) = 0;
virtual void Move(HInstruction* instruction, Location location, HInstruction* move_for) = 0;
virtual void MoveConstant(Location destination, int32_t value) = 0;
+ virtual void AddLocationAsTemp(Location location, LocationSummary* locations) = 0;
+ virtual void MoveLocationToTemp(Location source,
+ const LocationSummary& locations,
+ int temp_index,
+ Primitive::Type type) = 0;
+ virtual void MoveTempToLocation(const LocationSummary& locations,
+ int temp_index,
+ Location destination,
+ Primitive::Type type) = 0;
+
virtual Assembler* GetAssembler() = 0;
virtual const Assembler& GetAssembler() const = 0;
virtual size_t GetWordSize() const = 0;
@@ -378,6 +404,16 @@
void GenerateInvokeUnresolvedRuntimeCall(HInvokeUnresolved* invoke);
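+ // Creates the LocationSummary for an unresolved field access, placing the
+ // inputs, output and temps according to the given runtime calling
+ // convention.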
+ void CreateUnresolvedFieldLocationSummary(
+ HInstruction* field_access,
+ Primitive::Type field_type,
+ const FieldAccessCallingConvetion& calling_convention);
+
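+ // Generates the runtime call performing an unresolved field access.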
+ void GenerateUnresolvedFieldAccess(HInstruction* field_access,
+ Primitive::Type field_type,
+ uint32_t field_index,
+ uint32_t dex_pc);
+
void SetDisassemblyInformation(DisassemblyInformation* info) { disasm_info_ = info; }
DisassemblyInformation* GetDisassemblyInformation() const { return disasm_info_; }
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc
index 6f89293..15fc446 100644
--- a/compiler/optimizing/code_generator_arm.cc
+++ b/compiler/optimizing/code_generator_arm.cc
@@ -969,6 +969,59 @@
__ LoadImmediate(location.AsRegister<Register>(), value);
}
+void CodeGeneratorARM::AddLocationAsTemp(Location location, LocationSummary* locations) {
+ if (location.IsRegister()) {
+ locations->AddTemp(location);
+ } else if (location.IsRegisterPair()) {
+ locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairLow<Register>()));
+ locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairHigh<Register>()));
+ } else {
+ UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
+ }
+}
+
+void CodeGeneratorARM::MoveLocationToTemp(Location source,
+ const LocationSummary& locations,
+ int temp_index,
+ Primitive::Type type) {
+ if (!Primitive::IsFloatingPointType(type)) {
+ UNIMPLEMENTED(FATAL) << "MoveLocationToTemp not implemented for type " << type;
+ }
+
+ if (type == Primitive::kPrimFloat) {
+ DCHECK(source.IsFpuRegister()) << source;
+ __ vmovrs(locations.GetTemp(temp_index).AsRegister<Register>(),
+ source.AsFpuRegister<SRegister>());
+ } else {
+ DCHECK_EQ(type, Primitive::kPrimDouble) << type;
+ DCHECK(source.IsFpuRegisterPair()) << source;
+ __ vmovrrd(locations.GetTemp(temp_index).AsRegister<Register>(),
+ locations.GetTemp(temp_index + 1).AsRegister<Register>(),
+ FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()));
+ }
+}
+
+void CodeGeneratorARM::MoveTempToLocation(const LocationSummary& locations,
+ int temp_index,
+ Location destination,
+ Primitive::Type type) {
+ if (!Primitive::IsFloatingPointType(type)) {
+ UNIMPLEMENTED(FATAL) << "MoveLocationToTemp not implemented for type " << type;
+ }
+
+ if (type == Primitive::kPrimFloat) {
+ DCHECK(destination.IsFpuRegister()) << destination;
+ __ vmovsr(destination.AsFpuRegister<SRegister>(),
+ locations.GetTemp(temp_index).AsRegister<Register>());
+ } else {
+ DCHECK(type == Primitive::kPrimDouble);
+ DCHECK(destination.IsFpuRegisterPair()) << destination;
+ __ vmovdrr(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
+ locations.GetTemp(temp_index).AsRegister<Register>(),
+ locations.GetTemp(temp_index + 1).AsRegister<Register>());
+ }
+}
+
void CodeGeneratorARM::InvokeRuntime(QuickEntrypointEnum entrypoint,
HInstruction* instruction,
uint32_t dex_pc,
@@ -3536,6 +3589,66 @@
HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
+void LocationsBuilderARM::VisitUnresolvedInstanceFieldGet(
+ HUnresolvedInstanceFieldGet* instruction) {
+ FieldAccessCallingConvetionARM calling_convention;
+ codegen_->CreateUnresolvedFieldLocationSummary(
+ instruction, instruction->GetFieldType(), calling_convention);
+}
+
+void InstructionCodeGeneratorARM::VisitUnresolvedInstanceFieldGet(
+ HUnresolvedInstanceFieldGet* instruction) {
+ codegen_->GenerateUnresolvedFieldAccess(instruction,
+ instruction->GetFieldType(),
+ instruction->GetFieldIndex(),
+ instruction->GetDexPc());
+}
+
+void LocationsBuilderARM::VisitUnresolvedInstanceFieldSet(
+ HUnresolvedInstanceFieldSet* instruction) {
+ FieldAccessCallingConvetionARM calling_convention;
+ codegen_->CreateUnresolvedFieldLocationSummary(
+ instruction, instruction->GetFieldType(), calling_convention);
+}
+
+void InstructionCodeGeneratorARM::VisitUnresolvedInstanceFieldSet(
+ HUnresolvedInstanceFieldSet* instruction) {
+ codegen_->GenerateUnresolvedFieldAccess(instruction,
+ instruction->GetFieldType(),
+ instruction->GetFieldIndex(),
+ instruction->GetDexPc());
+}
+
+void LocationsBuilderARM::VisitUnresolvedStaticFieldGet(
+ HUnresolvedStaticFieldGet* instruction) {
+ FieldAccessCallingConvetionARM calling_convention;
+ codegen_->CreateUnresolvedFieldLocationSummary(
+ instruction, instruction->GetFieldType(), calling_convention);
+}
+
+void InstructionCodeGeneratorARM::VisitUnresolvedStaticFieldGet(
+ HUnresolvedStaticFieldGet* instruction) {
+ codegen_->GenerateUnresolvedFieldAccess(instruction,
+ instruction->GetFieldType(),
+ instruction->GetFieldIndex(),
+ instruction->GetDexPc());
+}
+
+void LocationsBuilderARM::VisitUnresolvedStaticFieldSet(
+ HUnresolvedStaticFieldSet* instruction) {
+ FieldAccessCallingConvetionARM calling_convention;
+ codegen_->CreateUnresolvedFieldLocationSummary(
+ instruction, instruction->GetFieldType(), calling_convention);
+}
+
+void InstructionCodeGeneratorARM::VisitUnresolvedStaticFieldSet(
+ HUnresolvedStaticFieldSet* instruction) {
+ codegen_->GenerateUnresolvedFieldAccess(instruction,
+ instruction->GetFieldType(),
+ instruction->GetFieldIndex(),
+ instruction->GetDexPc());
+}
+
void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) {
LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
? LocationSummary::kCallOnSlowPath
diff --git a/compiler/optimizing/code_generator_arm.h b/compiler/optimizing/code_generator_arm.h
index 91cfd00..651b31b 100644
--- a/compiler/optimizing/code_generator_arm.h
+++ b/compiler/optimizing/code_generator_arm.h
@@ -97,6 +97,40 @@
DISALLOW_COPY_AND_ASSIGN(InvokeDexCallingConventionVisitorARM);
};
+class FieldAccessCallingConvetionARM : public FieldAccessCallingConvetion {
+ public:
+ FieldAccessCallingConvetionARM() {}
+
+ Location GetObjectLocation() const OVERRIDE {
+ return Location::RegisterLocation(R1);
+ }
+ Location GetFieldIndexLocation() const OVERRIDE {
+ return Location::RegisterLocation(R0);
+ }
+ Location GetReturnLocation(Primitive::Type type) const OVERRIDE {
+ return Primitive::Is64BitType(type)
+ ? Location::RegisterPairLocation(R0, R1)
+ : Location::RegisterLocation(R0);
+ }
+ Location GetSetValueLocation(Primitive::Type type, bool is_instance) const OVERRIDE {
+ return Primitive::Is64BitType(type)
+ ? (is_instance
+ ? Location::RegisterPairLocation(R2, R3)
+ : Location::RegisterPairLocation(R1, R2))
+ : (is_instance
+ ? Location::RegisterLocation(R2)
+ : Location::RegisterLocation(R1));
+ }
+ Location GetFpuLocation(Primitive::Type type) const OVERRIDE {
+ return Primitive::Is64BitType(type)
+ ? Location::FpuRegisterPairLocation(S0, S1)
+ : Location::FpuRegisterLocation(S0);
+ }
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(FieldAccessCallingConvetionARM);
+};
+
class ParallelMoveResolverARM : public ParallelMoveResolverWithSwap {
public:
ParallelMoveResolverARM(ArenaAllocator* allocator, CodeGeneratorARM* codegen)
@@ -240,6 +274,17 @@
void Bind(HBasicBlock* block) OVERRIDE;
void Move(HInstruction* instruction, Location location, HInstruction* move_for) OVERRIDE;
void MoveConstant(Location destination, int32_t value) OVERRIDE;
+
+ void AddLocationAsTemp(Location location, LocationSummary* locations) OVERRIDE;
+ void MoveLocationToTemp(Location source,
+ const LocationSummary& locations,
+ int temp_index,
+ Primitive::Type type) OVERRIDE;
+ void MoveTempToLocation(const LocationSummary& locations,
+ int temp_index,
+ Location destination,
+ Primitive::Type type) OVERRIDE;
+
size_t SaveCoreRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;
size_t RestoreCoreRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;
size_t SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 531b669..c774bf0 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -19,7 +19,6 @@
#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_utils.h"
-#include "common_arm64.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
@@ -721,6 +720,46 @@
__ Mov(RegisterFrom(location, Primitive::kPrimInt), value);
}
+void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
+ if (location.IsRegister()) {
+ locations->AddTemp(location);
+ } else {
+ UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
+ }
+}
+
+void CodeGeneratorARM64::MoveLocationToTemp(Location source,
+ const LocationSummary& locations,
+ int temp_index,
+ Primitive::Type type) {
+ if (!Primitive::IsFloatingPointType(type)) {
+ UNIMPLEMENTED(FATAL) << "MoveLocationToTemp not implemented for type " << type;
+ }
+
+ DCHECK(source.IsFpuRegister()) << source;
+ Primitive::Type temp_type = Primitive::Is64BitType(type)
+ ? Primitive::kPrimLong
+ : Primitive::kPrimInt;
+ __ Fmov(RegisterFrom(locations.GetTemp(temp_index), temp_type),
+ FPRegisterFrom(source, type));
+}
+
+void CodeGeneratorARM64::MoveTempToLocation(const LocationSummary& locations,
+ int temp_index,
+ Location destination,
+ Primitive::Type type) {
+ if (!Primitive::IsFloatingPointType(type)) {
+ UNIMPLEMENTED(FATAL) << "MoveLocationToTemp not implemented for type " << type;
+ }
+
+ DCHECK(destination.IsFpuRegister()) << destination;
+ Primitive::Type temp_type = Primitive::Is64BitType(type)
+ ? Primitive::kPrimLong
+ : Primitive::kPrimInt;
+ __ Fmov(FPRegisterFrom(destination, type),
+ RegisterFrom(locations.GetTemp(temp_index), temp_type));
+}
+
Location CodeGeneratorARM64::GetStackLocation(HLoadLocal* load) const {
Primitive::Type type = load->GetType();
@@ -3160,6 +3199,66 @@
HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
+void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
+ HUnresolvedInstanceFieldGet* instruction) {
+ FieldAccessCallingConvetionARM64 calling_convention;
+ codegen_->CreateUnresolvedFieldLocationSummary(
+ instruction, instruction->GetFieldType(), calling_convention);
+}
+
+void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
+ HUnresolvedInstanceFieldGet* instruction) {
+ codegen_->GenerateUnresolvedFieldAccess(instruction,
+ instruction->GetFieldType(),
+ instruction->GetFieldIndex(),
+ instruction->GetDexPc());
+}
+
+void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
+ HUnresolvedInstanceFieldSet* instruction) {
+ FieldAccessCallingConvetionARM64 calling_convention;
+ codegen_->CreateUnresolvedFieldLocationSummary(
+ instruction, instruction->GetFieldType(), calling_convention);
+}
+
+void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
+ HUnresolvedInstanceFieldSet* instruction) {
+ codegen_->GenerateUnresolvedFieldAccess(instruction,
+ instruction->GetFieldType(),
+ instruction->GetFieldIndex(),
+ instruction->GetDexPc());
+}
+
+void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
+ HUnresolvedStaticFieldGet* instruction) {
+ FieldAccessCallingConvetionARM64 calling_convention;
+ codegen_->CreateUnresolvedFieldLocationSummary(
+ instruction, instruction->GetFieldType(), calling_convention);
+}
+
+void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
+ HUnresolvedStaticFieldGet* instruction) {
+ codegen_->GenerateUnresolvedFieldAccess(instruction,
+ instruction->GetFieldType(),
+ instruction->GetFieldIndex(),
+ instruction->GetDexPc());
+}
+
+void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
+ HUnresolvedStaticFieldSet* instruction) {
+ FieldAccessCallingConvetionARM64 calling_convention;
+ codegen_->CreateUnresolvedFieldLocationSummary(
+ instruction, instruction->GetFieldType(), calling_convention);
+}
+
+void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
+ HUnresolvedStaticFieldSet* instruction) {
+ codegen_->GenerateUnresolvedFieldAccess(instruction,
+ instruction->GetFieldType(),
+ instruction->GetFieldIndex(),
+ instruction->GetDexPc());
+}
+
void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}
diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h
index 576406e..3218002 100644
--- a/compiler/optimizing/code_generator_arm64.h
+++ b/compiler/optimizing/code_generator_arm64.h
@@ -18,6 +18,7 @@
#define ART_COMPILER_OPTIMIZING_CODE_GENERATOR_ARM64_H_
#include "code_generator.h"
+#include "common_arm64.h"
#include "dex/compiler_enums.h"
#include "driver/compiler_options.h"
#include "nodes.h"
@@ -141,6 +142,31 @@
DISALLOW_COPY_AND_ASSIGN(InvokeDexCallingConventionVisitorARM64);
};
+class FieldAccessCallingConvetionARM64 : public FieldAccessCallingConvetion {
+ public:
+ FieldAccessCallingConvetionARM64() {}
+
+ Location GetObjectLocation() const OVERRIDE {
+ return helpers::LocationFrom(vixl::x1);
+ }
+ Location GetFieldIndexLocation() const OVERRIDE {
+ return helpers::LocationFrom(vixl::x0);
+ }
+ Location GetReturnLocation(Primitive::Type type ATTRIBUTE_UNUSED) const OVERRIDE {
+ return helpers::LocationFrom(vixl::x0);
+ }
+ Location GetSetValueLocation(
+ Primitive::Type type ATTRIBUTE_UNUSED, bool is_instance) const OVERRIDE {
+ return is_instance ? helpers::LocationFrom(vixl::x2) : helpers::LocationFrom(vixl::x1);
+ }
+ Location GetFpuLocation(Primitive::Type type ATTRIBUTE_UNUSED) const OVERRIDE {
+ return helpers::LocationFrom(vixl::d0);
+ }
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(FieldAccessCallingConvetionARM64);
+};
+
class InstructionCodeGeneratorARM64 : public HGraphVisitor {
public:
InstructionCodeGeneratorARM64(HGraph* graph, CodeGeneratorARM64* codegen);
@@ -343,6 +369,17 @@
// locations, and is used for optimisation and debugging.
void MoveLocation(Location destination, Location source,
Primitive::Type type = Primitive::kPrimVoid);
+
+ void AddLocationAsTemp(Location location, LocationSummary* locations) OVERRIDE;
+ void MoveLocationToTemp(Location source,
+ const LocationSummary& locations,
+ int temp_index,
+ Primitive::Type type) OVERRIDE;
+ void MoveTempToLocation(const LocationSummary& locations,
+ int temp_index,
+ Location destination,
+ Primitive::Type type) OVERRIDE;
+
void Load(Primitive::Type type, vixl::CPURegister dst, const vixl::MemOperand& src);
void Store(Primitive::Type type, vixl::CPURegister rt, const vixl::MemOperand& dst);
void LoadAcquire(HInstruction* instruction, vixl::CPURegister dst, const vixl::MemOperand& src);
diff --git a/compiler/optimizing/code_generator_mips64.cc b/compiler/optimizing/code_generator_mips64.cc
index bf0d2e2..d521d78 100644
--- a/compiler/optimizing/code_generator_mips64.cc
+++ b/compiler/optimizing/code_generator_mips64.cc
@@ -861,6 +861,52 @@
__ LoadConst32(location.AsRegister<GpuRegister>(), value);
}
+void CodeGeneratorMIPS64::AddLocationAsTemp(Location location, LocationSummary* locations) {
+ if (location.IsRegister()) {
+ locations->AddTemp(location);
+ } else {
+ UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
+ }
+}
+
+void CodeGeneratorMIPS64::MoveLocationToTemp(Location source,
+ const LocationSummary& locations,
+ int temp_index,
+ Primitive::Type type) {
+ if (!Primitive::IsFloatingPointType(type)) {
+ UNIMPLEMENTED(FATAL) << "MoveLocationToTemp not implemented for type " << type;
+ }
+
+ DCHECK(source.IsFpuRegister()) << source;
+ if (type == Primitive::kPrimFloat) {
+ __ Mfc1(locations.GetTemp(temp_index).AsRegister<GpuRegister>(),
+ source.AsFpuRegister<FpuRegister>());
+ } else {
+ DCHECK_EQ(type, Primitive::kPrimDouble);
+ __ Dmfc1(locations.GetTemp(temp_index).AsRegister<GpuRegister>(),
+ source.AsFpuRegister<FpuRegister>());
+ }
+}
+
+void CodeGeneratorMIPS64::MoveTempToLocation(const LocationSummary& locations,
+ int temp_index,
+ Location destination,
+ Primitive::Type type) {
+ if (!Primitive::IsFloatingPointType(type)) {
+ UNIMPLEMENTED(FATAL) << "MoveLocationToTemp not implemented for type " << type;
+ }
+
+ DCHECK(destination.IsFpuRegister()) << destination;
+ if (type == Primitive::kPrimFloat) {
+ __ Mtc1(locations.GetTemp(temp_index).AsRegister<GpuRegister>(),
+ destination.AsFpuRegister<FpuRegister>());
+ } else {
+ DCHECK_EQ(type, Primitive::kPrimDouble);
+ __ Dmtc1(locations.GetTemp(temp_index).AsRegister<GpuRegister>(),
+ destination.AsFpuRegister<FpuRegister>());
+ }
+}
+
Location CodeGeneratorMIPS64::GetStackLocation(HLoadLocal* load) const {
Primitive::Type type = load->GetType();
@@ -3108,6 +3154,66 @@
HandleFieldSet(instruction, instruction->GetFieldInfo());
}
+void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldGet(
+ HUnresolvedInstanceFieldGet* instruction) {
+ FieldAccessCallingConvetionMIPS64 calling_convention;
+ codegen_->CreateUnresolvedFieldLocationSummary(
+ instruction, instruction->GetFieldType(), calling_convention);
+}
+
+void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldGet(
+ HUnresolvedInstanceFieldGet* instruction) {
+ codegen_->GenerateUnresolvedFieldAccess(instruction,
+ instruction->GetFieldType(),
+ instruction->GetFieldIndex(),
+ instruction->GetDexPc());
+}
+
+void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldSet(
+ HUnresolvedInstanceFieldSet* instruction) {
+ FieldAccessCallingConvetionMIPS64 calling_convention;
+ codegen_->CreateUnresolvedFieldLocationSummary(
+ instruction, instruction->GetFieldType(), calling_convention);
+}
+
+void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldSet(
+ HUnresolvedInstanceFieldSet* instruction) {
+ codegen_->GenerateUnresolvedFieldAccess(instruction,
+ instruction->GetFieldType(),
+ instruction->GetFieldIndex(),
+ instruction->GetDexPc());
+}
+
+void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldGet(
+ HUnresolvedStaticFieldGet* instruction) {
+ FieldAccessCallingConvetionMIPS64 calling_convention;
+ codegen_->CreateUnresolvedFieldLocationSummary(
+ instruction, instruction->GetFieldType(), calling_convention);
+}
+
+void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldGet(
+ HUnresolvedStaticFieldGet* instruction) {
+ codegen_->GenerateUnresolvedFieldAccess(instruction,
+ instruction->GetFieldType(),
+ instruction->GetFieldIndex(),
+ instruction->GetDexPc());
+}
+
+void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldSet(
+ HUnresolvedStaticFieldSet* instruction) {
+ FieldAccessCallingConvetionMIPS64 calling_convention;
+ codegen_->CreateUnresolvedFieldLocationSummary(
+ instruction, instruction->GetFieldType(), calling_convention);
+}
+
+void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldSet(
+ HUnresolvedStaticFieldSet* instruction) {
+ codegen_->GenerateUnresolvedFieldAccess(instruction,
+ instruction->GetFieldType(),
+ instruction->GetFieldIndex(),
+ instruction->GetDexPc());
+}
+
void LocationsBuilderMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}
diff --git a/compiler/optimizing/code_generator_mips64.h b/compiler/optimizing/code_generator_mips64.h
index 8511eb6..cac94c7 100644
--- a/compiler/optimizing/code_generator_mips64.h
+++ b/compiler/optimizing/code_generator_mips64.h
@@ -106,6 +106,31 @@
DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};
+class FieldAccessCallingConvetionMIPS64 : public FieldAccessCallingConvetion {
+ public:
+ FieldAccessCallingConvetionMIPS64() {}
+
+ Location GetObjectLocation() const OVERRIDE {
+ return Location::RegisterLocation(A1);
+ }
+ Location GetFieldIndexLocation() const OVERRIDE {
+ return Location::RegisterLocation(A0);
+ }
+ Location GetReturnLocation(Primitive::Type type ATTRIBUTE_UNUSED) const OVERRIDE {
+ return Location::RegisterLocation(V0);
+ }
+ Location GetSetValueLocation(
+ Primitive::Type type ATTRIBUTE_UNUSED, bool is_instance) const OVERRIDE {
+ return is_instance ? Location::RegisterLocation(A2) : Location::RegisterLocation(A1);
+ }
+ Location GetFpuLocation(Primitive::Type type ATTRIBUTE_UNUSED) const OVERRIDE {
+ return Location::FpuRegisterLocation(F0);
+ }
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(FieldAccessCallingConvetionMIPS64);
+};
+
class ParallelMoveResolverMIPS64 : public ParallelMoveResolverWithSwap {
public:
ParallelMoveResolverMIPS64(ArenaAllocator* allocator, CodeGeneratorMIPS64* codegen)
@@ -285,6 +310,17 @@
void MoveConstant(Location destination, int32_t value) OVERRIDE;
+ void AddLocationAsTemp(Location location, LocationSummary* locations) OVERRIDE;
+ void MoveLocationToTemp(Location source,
+ const LocationSummary& locations,
+ int temp_index,
+ Primitive::Type type) OVERRIDE;
+ void MoveTempToLocation(const LocationSummary& locations,
+ int temp_index,
+ Location destination,
+ Primitive::Type type) OVERRIDE;
+
void SwapLocations(Location loc1, Location loc2, Primitive::Type type);
// Generate code to invoke a runtime entry point.
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 9c5ecc3..a8dbbdc 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -904,6 +904,62 @@
__ movl(location.AsRegister<Register>(), Immediate(value));
}
+void CodeGeneratorX86::AddLocationAsTemp(Location location, LocationSummary* locations) {
+ if (location.IsRegister()) {
+ locations->AddTemp(location);
+ } else if (location.IsRegisterPair()) {
+ locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairLow<Register>()));
+ locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairHigh<Register>()));
+ } else {
+ UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
+ }
+}
+
+void CodeGeneratorX86::MoveLocationToTemp(Location source,
+ const LocationSummary& locations,
+ int temp_index,
+ Primitive::Type type) {
+ if (!Primitive::IsFloatingPointType(type)) {
+ UNIMPLEMENTED(FATAL) << "MoveLocationToTemp not implemented for type " << type;
+ }
+
+ DCHECK(source.IsFpuRegister()) << source;
+ XmmRegister src_reg = source.AsFpuRegister<XmmRegister>();
+ if (type == Primitive::kPrimFloat) {
+ __ movd(locations.GetTemp(temp_index).AsRegister<Register>(), src_reg);
+ } else {
+ DCHECK(type == Primitive::kPrimDouble);
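+ // Copy the low 32 bits into the first core temp, then shift the high
+ // 32 bits down and copy them into the second temp.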
+ __ movd(locations.GetTemp(temp_index).AsRegister<Register>(), src_reg);
+ __ psrlq(src_reg, Immediate(32));
+ __ movd(locations.GetTemp(temp_index + 1).AsRegister<Register>(), src_reg);
+ }
+}
+
+void CodeGeneratorX86::MoveTempToLocation(const LocationSummary& locations,
+ int temp_index,
+ Location destination,
+ Primitive::Type type) {
+ if (!Primitive::IsFloatingPointType(type)) {
+ UNIMPLEMENTED(FATAL) << "MoveLocationToTemp not implemented for type " << type;
+ }
+
+ DCHECK(destination.IsFpuRegister()) << destination;
+ XmmRegister dst_reg = destination.AsFpuRegister<XmmRegister>();
+ if (type == Primitive::kPrimFloat) {
+ __ movd(dst_reg, locations.GetTemp(temp_index).AsRegister<Register>());
+ } else {
+ DCHECK(type == Primitive::kPrimDouble);
+ size_t elem_size = Primitive::ComponentSize(Primitive::kPrimInt);
+ // Create stack space for 2 elements.
+ __ subl(ESP, Immediate(2 * elem_size));
+ __ movl(Address(ESP, 0), locations.GetTemp(temp_index).AsRegister<Register>());
+ __ movl(Address(ESP, elem_size), locations.GetTemp(temp_index + 1).AsRegister<Register>());
+ __ movsd(dst_reg, Address(ESP, 0));
+ // And remove the temporary stack space we allocated.
+ __ addl(ESP, Immediate(2 * elem_size));
+ }
+}
+
void InstructionCodeGeneratorX86::HandleGoto(HInstruction* got, HBasicBlock* successor) {
DCHECK(!successor->IsExitBlock());
@@ -4023,6 +4079,66 @@
HandleFieldGet(instruction, instruction->GetFieldInfo());
}
+void LocationsBuilderX86::VisitUnresolvedInstanceFieldGet(
+ HUnresolvedInstanceFieldGet* instruction) {
+ FieldAccessCallingConvetionX86 calling_convention;
+ codegen_->CreateUnresolvedFieldLocationSummary(
+ instruction, instruction->GetFieldType(), calling_convention);
+}
+
+void InstructionCodeGeneratorX86::VisitUnresolvedInstanceFieldGet(
+ HUnresolvedInstanceFieldGet* instruction) {
+ codegen_->GenerateUnresolvedFieldAccess(instruction,
+ instruction->GetFieldType(),
+ instruction->GetFieldIndex(),
+ instruction->GetDexPc());
+}
+
+void LocationsBuilderX86::VisitUnresolvedInstanceFieldSet(
+ HUnresolvedInstanceFieldSet* instruction) {
+ FieldAccessCallingConvetionX86 calling_convention;
+ codegen_->CreateUnresolvedFieldLocationSummary(
+ instruction, instruction->GetFieldType(), calling_convention);
+}
+
+void InstructionCodeGeneratorX86::VisitUnresolvedInstanceFieldSet(
+ HUnresolvedInstanceFieldSet* instruction) {
+ codegen_->GenerateUnresolvedFieldAccess(instruction,
+ instruction->GetFieldType(),
+ instruction->GetFieldIndex(),
+ instruction->GetDexPc());
+}
+
+void LocationsBuilderX86::VisitUnresolvedStaticFieldGet(
+ HUnresolvedStaticFieldGet* instruction) {
+ FieldAccessCallingConvetionX86 calling_convention;
+ codegen_->CreateUnresolvedFieldLocationSummary(
+ instruction, instruction->GetFieldType(), calling_convention);
+}
+
+void InstructionCodeGeneratorX86::VisitUnresolvedStaticFieldGet(
+ HUnresolvedStaticFieldGet* instruction) {
+ codegen_->GenerateUnresolvedFieldAccess(instruction,
+ instruction->GetFieldType(),
+ instruction->GetFieldIndex(),
+ instruction->GetDexPc());
+}
+
+void LocationsBuilderX86::VisitUnresolvedStaticFieldSet(
+ HUnresolvedStaticFieldSet* instruction) {
+ FieldAccessCallingConvetionX86 calling_convention;
+ codegen_->CreateUnresolvedFieldLocationSummary(
+ instruction, instruction->GetFieldType(), calling_convention);
+}
+
+void InstructionCodeGeneratorX86::VisitUnresolvedStaticFieldSet(
+ HUnresolvedStaticFieldSet* instruction) {
+ codegen_->GenerateUnresolvedFieldAccess(instruction,
+ instruction->GetFieldType(),
+ instruction->GetFieldIndex(),
+ instruction->GetDexPc());
+}
+
void LocationsBuilderX86::VisitNullCheck(HNullCheck* instruction) {
LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
? LocationSummary::kCallOnSlowPath
diff --git a/compiler/optimizing/code_generator_x86.h b/compiler/optimizing/code_generator_x86.h
index f3307cf..1a63083 100644
--- a/compiler/optimizing/code_generator_x86.h
+++ b/compiler/optimizing/code_generator_x86.h
@@ -92,6 +92,38 @@
DISALLOW_COPY_AND_ASSIGN(InvokeDexCallingConventionVisitorX86);
};
+class FieldAccessCallingConvetionX86 : public FieldAccessCallingConvetion {
+ public:
+ FieldAccessCallingConvetionX86() {}
+
+ Location GetObjectLocation() const OVERRIDE {
+ return Location::RegisterLocation(ECX);
+ }
+ Location GetFieldIndexLocation() const OVERRIDE {
+ return Location::RegisterLocation(EAX);
+ }
+ Location GetReturnLocation(Primitive::Type type) const OVERRIDE {
+ return Primitive::Is64BitType(type)
+ ? Location::RegisterPairLocation(EAX, EDX)
+ : Location::RegisterLocation(EAX);
+ }
+ Location GetSetValueLocation(Primitive::Type type, bool is_instance) const OVERRIDE {
+ return Primitive::Is64BitType(type)
+ ? (is_instance
+ ? Location::RegisterPairLocation(EDX, EBX)
+ : Location::RegisterPairLocation(ECX, EDX))
+ : (is_instance
+ ? Location::RegisterLocation(EDX)
+ : Location::RegisterLocation(ECX));
+ }
+ Location GetFpuLocation(Primitive::Type type ATTRIBUTE_UNUSED) const OVERRIDE {
+ return Location::FpuRegisterLocation(XMM0);
+ }
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(FieldAccessCallingConvetionX86);
+};
+
class ParallelMoveResolverX86 : public ParallelMoveResolverWithSwap {
public:
ParallelMoveResolverX86(ArenaAllocator* allocator, CodeGeneratorX86* codegen)
@@ -229,6 +261,17 @@
void Bind(HBasicBlock* block) OVERRIDE;
void Move(HInstruction* instruction, Location location, HInstruction* move_for) OVERRIDE;
void MoveConstant(Location destination, int32_t value) OVERRIDE;
+
+ void AddLocationAsTemp(Location location, LocationSummary* locations) OVERRIDE;
+ void MoveLocationToTemp(Location source,
+ const LocationSummary& locations,
+ int temp_index,
+ Primitive::Type type) OVERRIDE;
+ void MoveTempToLocation(const LocationSummary& locations,
+ int temp_index,
+ Location destination,
+ Primitive::Type type) OVERRIDE;
+
size_t SaveCoreRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;
size_t RestoreCoreRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;
size_t SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index 134bfed..ef20b71 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -928,6 +928,42 @@
Load64BitValue(location.AsRegister<CpuRegister>(), static_cast<int64_t>(value));
}
+void CodeGeneratorX86_64::AddLocationAsTemp(Location location, LocationSummary* locations) {
+ if (location.IsRegister()) {
+ locations->AddTemp(location);
+ } else {
+ UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
+ }
+}
+
+void CodeGeneratorX86_64::MoveLocationToTemp(Location source,
+ const LocationSummary& locations,
+ int temp_index,
+ Primitive::Type type) {
+ if (!Primitive::IsFloatingPointType(type)) {
+ UNIMPLEMENTED(FATAL) << "MoveLocationToTemp not implemented for type " << type;
+ }
+
+ DCHECK(source.IsFpuRegister()) << source;
+ __ movd(locations.GetTemp(temp_index).AsRegister<CpuRegister>(),
+ source.AsFpuRegister<XmmRegister>(),
+ Primitive::Is64BitType(type));
+}
+
+void CodeGeneratorX86_64::MoveTempToLocation(const LocationSummary& locations,
+ int temp_index,
+ Location destination,
+ Primitive::Type type) {
+ if (!Primitive::IsFloatingPointType(type)) {
+ UNIMPLEMENTED(FATAL) << "MoveLocationToTemp not implemented for type " << type;
+ }
+
+ DCHECK(destination.IsFpuRegister()) << destination;
+ __ movd(destination.AsFpuRegister<XmmRegister>(),
+ locations.GetTemp(temp_index).AsRegister<CpuRegister>(),
+ Primitive::Is64BitType(type));
+}
+
void InstructionCodeGeneratorX86_64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
DCHECK(!successor->IsExitBlock());
@@ -3787,6 +3823,66 @@
HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
+void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldGet(
+ HUnresolvedInstanceFieldGet* instruction) {
+ FieldAccessCallingConvetionX86_64 calling_convention;
+ codegen_->CreateUnresolvedFieldLocationSummary(
+ instruction, instruction->GetFieldType(), calling_convention);
+}
+
+void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldGet(
+ HUnresolvedInstanceFieldGet* instruction) {
+ codegen_->GenerateUnresolvedFieldAccess(instruction,
+ instruction->GetFieldType(),
+ instruction->GetFieldIndex(),
+ instruction->GetDexPc());
+}
+
+void LocationsBuilderX86_64::VisitUnresolvedInstanceFieldSet(
+ HUnresolvedInstanceFieldSet* instruction) {
+ FieldAccessCallingConvetionX86_64 calling_convention;
+ codegen_->CreateUnresolvedFieldLocationSummary(
+ instruction, instruction->GetFieldType(), calling_convention);
+}
+
+void InstructionCodeGeneratorX86_64::VisitUnresolvedInstanceFieldSet(
+ HUnresolvedInstanceFieldSet* instruction) {
+ codegen_->GenerateUnresolvedFieldAccess(instruction,
+ instruction->GetFieldType(),
+ instruction->GetFieldIndex(),
+ instruction->GetDexPc());
+}
+
+void LocationsBuilderX86_64::VisitUnresolvedStaticFieldGet(
+ HUnresolvedStaticFieldGet* instruction) {
+ FieldAccessCallingConvetionX86_64 calling_convention;
+ codegen_->CreateUnresolvedFieldLocationSummary(
+ instruction, instruction->GetFieldType(), calling_convention);
+}
+
+void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldGet(
+ HUnresolvedStaticFieldGet* instruction) {
+ codegen_->GenerateUnresolvedFieldAccess(instruction,
+ instruction->GetFieldType(),
+ instruction->GetFieldIndex(),
+ instruction->GetDexPc());
+}
+
+void LocationsBuilderX86_64::VisitUnresolvedStaticFieldSet(
+ HUnresolvedStaticFieldSet* instruction) {
+ FieldAccessCallingConvetionX86_64 calling_convention;
+ codegen_->CreateUnresolvedFieldLocationSummary(
+ instruction, instruction->GetFieldType(), calling_convention);
+}
+
+void InstructionCodeGeneratorX86_64::VisitUnresolvedStaticFieldSet(
+ HUnresolvedStaticFieldSet* instruction) {
+ codegen_->GenerateUnresolvedFieldAccess(instruction,
+ instruction->GetFieldType(),
+ instruction->GetFieldIndex(),
+ instruction->GetDexPc());
+}
+
void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
? LocationSummary::kCallOnSlowPath
diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h
index 9b2423f..68c2b3d 100644
--- a/compiler/optimizing/code_generator_x86_64.h
+++ b/compiler/optimizing/code_generator_x86_64.h
@@ -70,6 +70,32 @@
DISALLOW_COPY_AND_ASSIGN(InvokeDexCallingConvention);
};
+class FieldAccessCallingConvetionX86_64 : public FieldAccessCallingConvetion {
+ public:
+ FieldAccessCallingConvetionX86_64() {}
+
+ Location GetObjectLocation() const OVERRIDE {
+ return Location::RegisterLocation(RSI);
+ }
+ Location GetFieldIndexLocation() const OVERRIDE {
+ return Location::RegisterLocation(RDI);
+ }
+ Location GetReturnLocation(Primitive::Type type ATTRIBUTE_UNUSED) const OVERRIDE {
+ return Location::RegisterLocation(RAX);
+ }
+ Location GetSetValueLocation(
+ Primitive::Type type ATTRIBUTE_UNUSED, bool is_instance) const OVERRIDE {
+ return Location::RegisterLocation(is_instance ? RDX : RSI);
+ }
+ Location GetFpuLocation(Primitive::Type type ATTRIBUTE_UNUSED) const OVERRIDE {
+ return Location::FpuRegisterLocation(XMM0);
+ }
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(FieldAccessCallingConvetionX86_64);
+};
+
class InvokeDexCallingConventionVisitorX86_64 : public InvokeDexCallingConventionVisitor {
public:
InvokeDexCallingConventionVisitorX86_64() {}
@@ -229,6 +255,15 @@
void Bind(HBasicBlock* block) OVERRIDE;
void Move(HInstruction* instruction, Location location, HInstruction* move_for) OVERRIDE;
void MoveConstant(Location destination, int32_t value) OVERRIDE;
+ void AddLocationAsTemp(Location location, LocationSummary* locations) OVERRIDE;
+ void MoveLocationToTemp(Location source,
+ const LocationSummary& locations,
+ int temp_index,
+ Primitive::Type type) OVERRIDE;
+ void MoveTempToLocation(const LocationSummary& locations,
+ int temp_index,
+ Location destination,
+ Primitive::Type type) OVERRIDE;
size_t SaveCoreRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;
size_t RestoreCoreRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;
size_t SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;
diff --git a/compiler/optimizing/graph_visualizer.cc b/compiler/optimizing/graph_visualizer.cc
index d05c514..628d75a 100644
--- a/compiler/optimizing/graph_visualizer.cc
+++ b/compiler/optimizing/graph_visualizer.cc
@@ -393,6 +393,22 @@
StartAttributeStream("intrinsic") << invoke->GetIntrinsic();
}
+ void VisitUnresolvedInstanceFieldGet(HUnresolvedInstanceFieldGet* field_access) OVERRIDE {
+ StartAttributeStream("field_type") << field_access->GetFieldType();
+ }
+
+ void VisitUnresolvedInstanceFieldSet(HUnresolvedInstanceFieldSet* field_access) OVERRIDE {
+ StartAttributeStream("field_type") << field_access->GetFieldType();
+ }
+
+ void VisitUnresolvedStaticFieldGet(HUnresolvedStaticFieldGet* field_access) OVERRIDE {
+ StartAttributeStream("field_type") << field_access->GetFieldType();
+ }
+
+ void VisitUnresolvedStaticFieldSet(HUnresolvedStaticFieldSet* field_access) OVERRIDE {
+ StartAttributeStream("field_type") << field_access->GetFieldType();
+ }
+
void VisitTryBoundary(HTryBoundary* try_boundary) OVERRIDE {
StartAttributeStream("kind") << (try_boundary->IsEntry() ? "entry" : "exit");
}
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 90ff20c..a44c982 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -1066,6 +1066,10 @@
M(Shr, BinaryOperation) \
M(StaticFieldGet, Instruction) \
M(StaticFieldSet, Instruction) \
+ M(UnresolvedInstanceFieldGet, Instruction) \
+ M(UnresolvedInstanceFieldSet, Instruction) \
+ M(UnresolvedStaticFieldGet, Instruction) \
+ M(UnresolvedStaticFieldSet, Instruction) \
M(StoreLocal, Instruction) \
M(Sub, BinaryOperation) \
M(SuspendCheck, Instruction) \
@@ -4671,6 +4675,124 @@
DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet);
};
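+// Instance field get for a field that could not be resolved at compile time.
+// Only the field index and the declared field type are known; code generators
+// lower this to a runtime call (see CodeGenerator::GenerateUnresolvedFieldAccess).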
+class HUnresolvedInstanceFieldGet : public HExpression<1> {
+ public:
+ HUnresolvedInstanceFieldGet(HInstruction* obj,
+ Primitive::Type field_type,
+ uint32_t field_index,
+ uint32_t dex_pc)
+ : HExpression(field_type, SideEffects::AllExceptGCDependency()),
+ field_index_(field_index),
+ dex_pc_(dex_pc) {
+ SetRawInputAt(0, obj);
+ }
+
+ bool NeedsEnvironment() const OVERRIDE { return true; }
+ bool CanThrow() const OVERRIDE { return true; }
+
+ Primitive::Type GetFieldType() const { return GetType(); }
+ uint32_t GetFieldIndex() const { return field_index_; }
+ uint32_t GetDexPc() const { return dex_pc_; }
+
+ DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);
+
+ private:
+ const uint32_t field_index_;
+ const uint32_t dex_pc_;
+
+ DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet);
+};
+
+class HUnresolvedInstanceFieldSet : public HTemplateInstruction<2> {
+ public:
+ HUnresolvedInstanceFieldSet(HInstruction* obj,
+ HInstruction* value,
+ Primitive::Type field_type,
+ uint32_t field_index,
+ uint32_t dex_pc)
+ : HTemplateInstruction(SideEffects::AllExceptGCDependency()),
+ field_type_(field_type),
+ field_index_(field_index),
+ dex_pc_(dex_pc) {
+ DCHECK_EQ(field_type, value->GetType());
+ SetRawInputAt(0, obj);
+ SetRawInputAt(1, value);
+ }
+
+ bool NeedsEnvironment() const OVERRIDE { return true; }
+ bool CanThrow() const OVERRIDE { return true; }
+
+ Primitive::Type GetFieldType() const { return field_type_; }
+ uint32_t GetFieldIndex() const { return field_index_; }
+ uint32_t GetDexPc() const { return dex_pc_; }
+
+ DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);
+
+ private:
+ const Primitive::Type field_type_;
+ const uint32_t field_index_;
+ const uint32_t dex_pc_;
+
+ DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet);
+};
+
+class HUnresolvedStaticFieldGet : public HExpression<0> {
+ public:
+ HUnresolvedStaticFieldGet(Primitive::Type field_type,
+ uint32_t field_index,
+ uint32_t dex_pc)
+ : HExpression(field_type, SideEffects::AllExceptGCDependency()),
+ field_index_(field_index),
+ dex_pc_(dex_pc) {
+ }
+
+ bool NeedsEnvironment() const OVERRIDE { return true; }
+ bool CanThrow() const OVERRIDE { return true; }
+
+ Primitive::Type GetFieldType() const { return GetType(); }
+ uint32_t GetFieldIndex() const { return field_index_; }
+ uint32_t GetDexPc() const { return dex_pc_; }
+
+ DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);
+
+ private:
+ const uint32_t field_index_;
+ const uint32_t dex_pc_;
+
+ DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet);
+};
+
+class HUnresolvedStaticFieldSet : public HTemplateInstruction<1> {
+ public:
+ HUnresolvedStaticFieldSet(HInstruction* value,
+ Primitive::Type field_type,
+ uint32_t field_index,
+ uint32_t dex_pc)
+ : HTemplateInstruction(SideEffects::AllExceptGCDependency()),
+ field_type_(field_type),
+ field_index_(field_index),
+ dex_pc_(dex_pc) {
+ DCHECK_EQ(field_type, value->GetType());
+ SetRawInputAt(0, value);
+ }
+
+ bool NeedsEnvironment() const OVERRIDE { return true; }
+ bool CanThrow() const OVERRIDE { return true; }
+
+ Primitive::Type GetFieldType() const { return field_type_; }
+ uint32_t GetFieldIndex() const { return field_index_; }
+ uint32_t GetDexPc() const { return dex_pc_; }
+
+ DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);
+
+ private:
+ const Primitive::Type field_type_;
+ const uint32_t field_index_;
+ const uint32_t dex_pc_;
+
+ DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet);
+};
+
// Implement the move-exception DEX instruction.
class HLoadException : public HExpression<0> {
public:
diff --git a/compiler/optimizing/optimizing_compiler_stats.h b/compiler/optimizing/optimizing_compiler_stats.h
index c7701b7..f1d2970 100644
--- a/compiler/optimizing/optimizing_compiler_stats.h
+++ b/compiler/optimizing/optimizing_compiler_stats.h
@@ -34,6 +34,7 @@
kInstructionSimplifications,
kInstructionSimplificationsArch,
kUnresolvedMethod,
+ kUnresolvedField,
kNotCompiledBranchOutsideMethodCode,
kNotCompiledCannotBuildSSA,
kNotCompiledCantAccesType,
@@ -45,7 +46,6 @@
kNotCompiledPathological,
kNotCompiledSpaceFilter,
kNotCompiledUnhandledInstruction,
- kNotCompiledUnresolvedField,
kNotCompiledUnsupportedIsa,
kNotCompiledVerifyAtRuntime,
kNotOptimizedDisabled,
@@ -104,6 +104,7 @@
case kInstructionSimplifications: return "kInstructionSimplifications";
case kInstructionSimplificationsArch: return "kInstructionSimplificationsArch";
case kUnresolvedMethod : return "kUnresolvedMethod";
+ case kUnresolvedField : return "kUnresolvedField";
case kNotCompiledBranchOutsideMethodCode: return "kNotCompiledBranchOutsideMethodCode";
case kNotCompiledCannotBuildSSA : return "kNotCompiledCannotBuildSSA";
case kNotCompiledCantAccesType : return "kNotCompiledCantAccesType";
@@ -115,7 +116,6 @@
case kNotCompiledPathological : return "kNotCompiledPathological";
case kNotCompiledSpaceFilter : return "kNotCompiledSpaceFilter";
case kNotCompiledUnhandledInstruction : return "kNotCompiledUnhandledInstruction";
- case kNotCompiledUnresolvedField : return "kNotCompiledUnresolvedField";
case kNotCompiledUnsupportedIsa : return "kNotCompiledUnsupportedIsa";
case kNotCompiledVerifyAtRuntime : return "kNotCompiledVerifyAtRuntime";
case kNotOptimizedDisabled : return "kNotOptimizedDisabled";
diff --git a/compiler/optimizing/reference_type_propagation.cc b/compiler/optimizing/reference_type_propagation.cc
index a88c543..cee495e 100644
--- a/compiler/optimizing/reference_type_propagation.cc
+++ b/compiler/optimizing/reference_type_propagation.cc
@@ -52,6 +52,8 @@
void SetClassAsTypeInfo(HInstruction* instr, mirror::Class* klass, bool is_exact);
void VisitInstanceFieldGet(HInstanceFieldGet* instr) OVERRIDE;
void VisitStaticFieldGet(HStaticFieldGet* instr) OVERRIDE;
+ void VisitUnresolvedInstanceFieldGet(HUnresolvedInstanceFieldGet* instr) OVERRIDE;
+ void VisitUnresolvedStaticFieldGet(HUnresolvedStaticFieldGet* instr) OVERRIDE;
void VisitInvoke(HInvoke* instr) OVERRIDE;
void VisitArrayGet(HArrayGet* instr) OVERRIDE;
void VisitCheckCast(HCheckCast* instr) OVERRIDE;
@@ -449,6 +451,22 @@
UpdateFieldAccessTypeInfo(instr, instr->GetFieldInfo());
}
+void RTPVisitor::VisitUnresolvedInstanceFieldGet(HUnresolvedInstanceFieldGet* instr) {
+ // TODO: Use descriptor to get the actual type.
+ if (instr->GetFieldType() == Primitive::kPrimNot) {
+ instr->SetReferenceTypeInfo(
+ ReferenceTypeInfo::Create(object_class_handle_, /* is_exact */ false));
+ }
+}
+
+void RTPVisitor::VisitUnresolvedStaticFieldGet(HUnresolvedStaticFieldGet* instr) {
+ // TODO: Use descriptor to get the actual type.
+ if (instr->GetFieldType() == Primitive::kPrimNot) {
+ instr->SetReferenceTypeInfo(
+ ReferenceTypeInfo::Create(object_class_handle_, /* is_exact */ false));
+ }
+}
+
void RTPVisitor::VisitLoadClass(HLoadClass* instr) {
ScopedObjectAccess soa(Thread::Current());
mirror::DexCache* dex_cache =
diff --git a/test/529-checker-unresolved/src/Main.java b/test/529-checker-unresolved/src/Main.java
index 6f04797..adb5ada 100644
--- a/test/529-checker-unresolved/src/Main.java
+++ b/test/529-checker-unresolved/src/Main.java
@@ -44,6 +44,76 @@
super.superMethod();
}
+ /// CHECK-START: void Main.callUnresolvedStaticFieldAccess() register (before)
+ /// CHECK: UnresolvedStaticFieldSet field_type:PrimByte
+ /// CHECK: UnresolvedStaticFieldSet field_type:PrimChar
+ /// CHECK: UnresolvedStaticFieldSet field_type:PrimInt
+ /// CHECK: UnresolvedStaticFieldSet field_type:PrimLong
+ /// CHECK: UnresolvedStaticFieldSet field_type:PrimFloat
+ /// CHECK: UnresolvedStaticFieldSet field_type:PrimDouble
+ /// CHECK: UnresolvedStaticFieldSet field_type:PrimNot
+
+ /// CHECK: UnresolvedStaticFieldGet field_type:PrimByte
+ /// CHECK: UnresolvedStaticFieldGet field_type:PrimChar
+ /// CHECK: UnresolvedStaticFieldGet field_type:PrimInt
+ /// CHECK: UnresolvedStaticFieldGet field_type:PrimLong
+ /// CHECK: UnresolvedStaticFieldGet field_type:PrimFloat
+ /// CHECK: UnresolvedStaticFieldGet field_type:PrimDouble
+ /// CHECK: UnresolvedStaticFieldGet field_type:PrimNot
+ static public void callUnresolvedStaticFieldAccess() {
+ Object o = new Object();
+ UnresolvedClass.staticByte = (byte)1;
+ UnresolvedClass.staticChar = '1';
+ UnresolvedClass.staticInt = 123456789;
+ UnresolvedClass.staticLong = 123456789123456789L;
+ UnresolvedClass.staticFloat = 123456789123456789f;
+ UnresolvedClass.staticDouble = 123456789123456789d;
+ UnresolvedClass.staticObject = o;
+
+ expectEquals((byte)1, UnresolvedClass.staticByte);
+ expectEquals('1', UnresolvedClass.staticChar);
+ expectEquals(123456789, UnresolvedClass.staticInt);
+ expectEquals(123456789123456789L, UnresolvedClass.staticLong);
+ expectEquals(123456789123456789f, UnresolvedClass.staticFloat);
+ expectEquals(123456789123456789d, UnresolvedClass.staticDouble);
+ expectEquals(o, UnresolvedClass.staticObject);
+ }
+
+ /// CHECK-START: void Main.callUnresolvedInstanceFieldAccess(UnresolvedClass) register (before)
+ /// CHECK: UnresolvedInstanceFieldSet field_type:PrimByte
+ /// CHECK: UnresolvedInstanceFieldSet field_type:PrimChar
+ /// CHECK: UnresolvedInstanceFieldSet field_type:PrimInt
+ /// CHECK: UnresolvedInstanceFieldSet field_type:PrimLong
+ /// CHECK: UnresolvedInstanceFieldSet field_type:PrimFloat
+ /// CHECK: UnresolvedInstanceFieldSet field_type:PrimDouble
+ /// CHECK: UnresolvedInstanceFieldSet field_type:PrimNot
+
+ /// CHECK: UnresolvedInstanceFieldGet field_type:PrimByte
+ /// CHECK: UnresolvedInstanceFieldGet field_type:PrimChar
+ /// CHECK: UnresolvedInstanceFieldGet field_type:PrimInt
+ /// CHECK: UnresolvedInstanceFieldGet field_type:PrimLong
+ /// CHECK: UnresolvedInstanceFieldGet field_type:PrimFloat
+ /// CHECK: UnresolvedInstanceFieldGet field_type:PrimDouble
+ /// CHECK: UnresolvedInstanceFieldGet field_type:PrimNot
+ static public void callUnresolvedInstanceFieldAccess(UnresolvedClass c) {
+ Object o = new Object();
+ c.instanceByte = (byte)1;
+ c.instanceChar = '1';
+ c.instanceInt = 123456789;
+ c.instanceLong = 123456789123456789L;
+ c.instanceFloat = 123456789123456789f;
+ c.instanceDouble = 123456789123456789d;
+ c.instanceObject = o;
+
+ expectEquals((byte)1, c.instanceByte);
+ expectEquals('1', c.instanceChar);
+ expectEquals(123456789, c.instanceInt);
+ expectEquals(123456789123456789L, c.instanceLong);
+ expectEquals(123456789123456789f, c.instanceFloat);
+ expectEquals(123456789123456789d, c.instanceDouble);
+ expectEquals(o, c.instanceObject);
+ }
+
/// CHECK-START: void Main.main(java.lang.String[]) register (before)
/// CHECK: InvokeUnresolved invoke_type:direct
static public void main(String[] args) {
@@ -52,5 +122,49 @@
callInvokeUnresolvedVirtual(c);
callInvokeUnresolvedInterface(c);
callInvokeUnresolvedSuper(new Main());
+ callUnresolvedStaticFieldAccess();
+ callUnresolvedInstanceFieldAccess(c);
+ }
+
+ public static void expectEquals(byte expected, byte result) {
+ if (expected != result) {
+ throw new Error("Expected: " + expected + ", found: " + result);
+ }
+ }
+
+ public static void expectEquals(char expected, char result) {
+ if (expected != result) {
+ throw new Error("Expected: " + expected + ", found: " + result);
+ }
+ }
+
+ public static void expectEquals(int expected, int result) {
+ if (expected != result) {
+ throw new Error("Expected: " + expected + ", found: " + result);
+ }
+ }
+
+ public static void expectEquals(long expected, long result) {
+ if (expected != result) {
+ throw new Error("Expected: " + expected + ", found: " + result);
+ }
+ }
+
+ public static void expectEquals(float expected, float result) {
+ if (expected != result) {
+ throw new Error("Expected: " + expected + ", found: " + result);
+ }
+ }
+
+ public static void expectEquals(double expected, double result) {
+ if (expected != result) {
+ throw new Error("Expected: " + expected + ", found: " + result);
+ }
+ }
+
+ public static void expectEquals(Object expected, Object result) {
+ if (expected != result) {
+ throw new Error("Expected: " + expected + ", found: " + result);
+ }
}
}
diff --git a/test/529-checker-unresolved/src/Unresolved.java b/test/529-checker-unresolved/src/Unresolved.java
index 5bf92dd..03ceb68 100644
--- a/test/529-checker-unresolved/src/Unresolved.java
+++ b/test/529-checker-unresolved/src/Unresolved.java
@@ -40,6 +40,22 @@
public void interfaceMethod() {
System.out.println("UnresolvedClass.interfaceMethod()");
}
+
+ public static byte staticByte;
+ public static char staticChar;
+ public static int staticInt;
+ public static long staticLong;
+ public static float staticFloat;
+ public static double staticDouble;
+ public static Object staticObject;
+
+ public byte instanceByte;
+ public char instanceChar;
+ public int instanceInt;
+ public long instanceLong;
+ public float instanceFloat;
+ public double instanceDouble;
+ public Object instanceObject;
}
final class UnresolvedFinalClass {