Merge "Use mmapped boot image intern table for PIC app HLoadString."
diff --git a/compiler/compiled_method.h b/compiler/compiled_method.h
index 97127f5..c67c523 100644
--- a/compiler/compiled_method.h
+++ b/compiler/compiled_method.h
@@ -126,6 +126,7 @@
kTypeRelative, // NOTE: Actual patching is instruction_set-dependent.
kTypeBssEntry, // NOTE: Actual patching is instruction_set-dependent.
kStringRelative, // NOTE: Actual patching is instruction_set-dependent.
+ kStringInternTable, // NOTE: Actual patching is instruction_set-dependent.
kStringBssEntry, // NOTE: Actual patching is instruction_set-dependent.
kBakerReadBarrierBranch, // NOTE: Actual patching is instruction_set-dependent.
};
@@ -196,6 +197,16 @@
return patch;
}
+ static LinkerPatch StringInternTablePatch(size_t literal_offset,
+ const DexFile* target_dex_file,
+ uint32_t pc_insn_offset,
+ uint32_t target_string_idx) {
+ LinkerPatch patch(literal_offset, Type::kStringInternTable, target_dex_file);
+ patch.string_idx_ = target_string_idx;
+ patch.pc_insn_offset_ = pc_insn_offset;
+ return patch;
+ }
+
static LinkerPatch StringBssEntryPatch(size_t literal_offset,
const DexFile* target_dex_file,
uint32_t pc_insn_offset,
@@ -234,6 +245,7 @@
case Type::kTypeRelative:
case Type::kTypeBssEntry:
case Type::kStringRelative:
+ case Type::kStringInternTable:
case Type::kStringBssEntry:
case Type::kBakerReadBarrierBranch:
return true;
@@ -264,12 +276,14 @@
const DexFile* TargetStringDexFile() const {
DCHECK(patch_type_ == Type::kStringRelative ||
+ patch_type_ == Type::kStringInternTable ||
patch_type_ == Type::kStringBssEntry);
return target_dex_file_;
}
dex::StringIndex TargetStringIndex() const {
DCHECK(patch_type_ == Type::kStringRelative ||
+ patch_type_ == Type::kStringInternTable ||
patch_type_ == Type::kStringBssEntry);
return dex::StringIndex(string_idx_);
}
@@ -280,6 +294,7 @@
patch_type_ == Type::kTypeRelative ||
patch_type_ == Type::kTypeBssEntry ||
patch_type_ == Type::kStringRelative ||
+ patch_type_ == Type::kStringInternTable ||
patch_type_ == Type::kStringBssEntry);
return pc_insn_offset_;
}
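A minimal sketch (illustrative values, not code from this change) of how a backend records the two halves of a PC-relative pair against the same intern-table slot using the factory added above; the second patch points back at the first instruction through pc_insn_offset:

    std::vector<LinkerPatch> patches;
    const DexFile* dex_file = nullptr;  // placeholder for the string's dex file
    uint32_t adrp_offset = 0x10u;       // literal offset of the page-address instruction
    uint32_t load_offset = 0x14u;       // literal offset of the load instruction
    uint32_t string_idx = 42u;          // dex string index
    patches.push_back(
        LinkerPatch::StringInternTablePatch(adrp_offset, dex_file, adrp_offset, string_idx));
    patches.push_back(
        LinkerPatch::StringInternTablePatch(load_offset, dex_file, adrp_offset, string_idx));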
diff --git a/compiler/image_test.cc b/compiler/image_test.cc
index 252fdd6..7b623dd 100644
--- a/compiler/image_test.cc
+++ b/compiler/image_test.cc
@@ -46,7 +46,7 @@
// Make sure that the new stuff in the clinit in ImageLayoutB is in the last image and not in the
// first two images.
ASSERT_EQ(image_sizes.size(), image_sizes_extra.size());
- // Sizes of the images should be the same. These sizes are for the whole image unrounded.
+ // Sizes of the object sections should be the same for all but the last image.
for (size_t i = 0; i < image_sizes.size() - 1; ++i) {
EXPECT_EQ(image_sizes[i], image_sizes_extra[i]);
}
diff --git a/compiler/image_test.h b/compiler/image_test.h
index 15f79cb..f1adedd 100644
--- a/compiler/image_test.h
+++ b/compiler/image_test.h
@@ -133,7 +133,7 @@
ImageHeader image_header;
CHECK_EQ(file->ReadFully(&image_header, sizeof(image_header)), true);
CHECK(image_header.IsValid());
- ret.push_back(image_header.GetImageSize());
+ ret.push_back(image_header.GetObjectsSection().Size());
}
return ret;
}
diff --git a/compiler/image_writer.cc b/compiler/image_writer.cc
index 12f477a..fa9f64c 100644
--- a/compiler/image_writer.cc
+++ b/compiler/image_writer.cc
@@ -689,7 +689,7 @@
for (ImageInfo& image_info : image_infos_) {
ImageSection unused_sections[ImageHeader::kSectionCount];
const size_t length = RoundUp(
- image_info.CreateImageSections(unused_sections), kPageSize);
+ image_info.CreateImageSections(unused_sections, compile_app_image_), kPageSize);
std::string error_msg;
image_info.image_.reset(MemMap::MapAnonymous("image writer image",
@@ -1834,7 +1834,8 @@
image_info.image_begin_ = global_image_begin_ + image_offset;
image_info.image_offset_ = image_offset;
ImageSection unused_sections[ImageHeader::kSectionCount];
- image_info.image_size_ = RoundUp(image_info.CreateImageSections(unused_sections), kPageSize);
+ image_info.image_size_ =
+ RoundUp(image_info.CreateImageSections(unused_sections, compile_app_image_), kPageSize);
// There should be no gaps until the next image.
image_offset += image_info.image_size_;
}
@@ -1865,7 +1866,8 @@
}
}
-size_t ImageWriter::ImageInfo::CreateImageSections(ImageSection* out_sections) const {
+size_t ImageWriter::ImageInfo::CreateImageSections(ImageSection* out_sections,
+ bool app_image) const {
DCHECK(out_sections != nullptr);
// Do not round up any sections here that are represented by the bins since it will break
@@ -1904,8 +1906,13 @@
ImageSection* dex_cache_arrays_section = &out_sections[ImageHeader::kSectionDexCacheArrays];
*dex_cache_arrays_section = ImageSection(bin_slot_offsets_[kBinDexCacheArray],
bin_slot_sizes_[kBinDexCacheArray]);
- // Round up to the alignment the string table expects. See HashSet::WriteToMemory.
- size_t cur_pos = RoundUp(dex_cache_arrays_section->End(), sizeof(uint64_t));
+ // For boot image, round up to the page boundary to separate the interned strings and
+ // class table from the modifiable data. We shall mprotect() these pages read-only when
+ // we load the boot image. This is more than sufficient for the string table alignment,
+ // namely sizeof(uint64_t). See HashSet::WriteToMemory.
+ static_assert(IsAligned<sizeof(uint64_t)>(kPageSize), "String table alignment check.");
+ size_t cur_pos =
+ RoundUp(dex_cache_arrays_section->End(), app_image ? sizeof(uint64_t) : kPageSize);
// Calculate the size of the interned strings.
ImageSection* interned_strings_section = &out_sections[ImageHeader::kSectionInternedStrings];
*interned_strings_section = ImageSection(cur_pos, intern_table_bytes_);
@@ -1928,7 +1935,7 @@
// Create the image sections.
ImageSection sections[ImageHeader::kSectionCount];
- const size_t image_end = image_info.CreateImageSections(sections);
+ const size_t image_end = image_info.CreateImageSections(sections, compile_app_image_);
// Finally bitmap section.
const size_t bitmap_bytes = image_info.image_bitmap_->Size();
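A minimal, self-contained sketch of the alignment decision above (assuming 4 KiB pages and a power-of-two RoundUp helper): app images keep the 8-byte alignment the hash table needs, while boot images round up to a page boundary so the tables can later be mprotect()ed read-only without touching neighbouring data.

    #include <cstddef>
    #include <cstdint>

    constexpr size_t kPageSize = 4096;

    constexpr size_t RoundUp(size_t value, size_t alignment) {
      return (value + alignment - 1u) & ~(alignment - 1u);
    }

    size_t AlignConstantTables(size_t dex_cache_arrays_end, bool app_image) {
      return RoundUp(dex_cache_arrays_end, app_image ? sizeof(uint64_t) : kPageSize);
    }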
diff --git a/compiler/image_writer.h b/compiler/image_writer.h
index 866e204..2fc394e 100644
--- a/compiler/image_writer.h
+++ b/compiler/image_writer.h
@@ -258,7 +258,7 @@
// Create the image sections into the out sections variable, returns the size of the image
// excluding the bitmap.
- size_t CreateImageSections(ImageSection* out_sections) const;
+ size_t CreateImageSections(ImageSection* out_sections, bool app_image) const;
std::unique_ptr<MemMap> image_; // Memory mapped for generating the image.
diff --git a/compiler/linker/arm64/relative_patcher_arm64.cc b/compiler/linker/arm64/relative_patcher_arm64.cc
index db829f3..0ebabc1 100644
--- a/compiler/linker/arm64/relative_patcher_arm64.cc
+++ b/compiler/linker/arm64/relative_patcher_arm64.cc
@@ -63,6 +63,7 @@
case LinkerPatch::Type::kTypeRelative:
case LinkerPatch::Type::kTypeBssEntry:
case LinkerPatch::Type::kStringRelative:
+ case LinkerPatch::Type::kStringInternTable:
case LinkerPatch::Type::kStringBssEntry:
return patch.LiteralOffset() == patch.PcInsnOffset();
}
@@ -266,6 +267,7 @@
// LDR/STR 32-bit or 64-bit with imm12 == 0 (unset).
DCHECK(patch.GetType() == LinkerPatch::Type::kMethodBssEntry ||
patch.GetType() == LinkerPatch::Type::kTypeBssEntry ||
+ patch.GetType() == LinkerPatch::Type::kStringInternTable ||
patch.GetType() == LinkerPatch::Type::kStringBssEntry) << patch.GetType();
DCHECK_EQ(insn & 0xbfbffc00, 0xb9000000) << std::hex << insn;
}
diff --git a/compiler/oat_writer.cc b/compiler/oat_writer.cc
index 58b6137..a33081e 100644
--- a/compiler/oat_writer.cc
+++ b/compiler/oat_writer.cc
@@ -335,6 +335,7 @@
bss_method_entries_(),
bss_type_entries_(),
bss_string_entries_(),
+ map_boot_image_tables_to_bss_(false),
oat_data_offset_(0u),
oat_header_(nullptr),
size_vdex_header_(0),
@@ -771,6 +772,8 @@
} else if (patch.GetType() == LinkerPatch::Type::kStringBssEntry) {
StringReference ref(patch.TargetStringDexFile(), patch.TargetStringIndex());
writer_->bss_string_entries_.Overwrite(ref, /* placeholder */ 0u);
+ } else if (patch.GetType() == LinkerPatch::Type::kStringInternTable) {
+ writer_->map_boot_image_tables_to_bss_ = true;
}
}
} else {
@@ -1398,6 +1401,14 @@
target_offset);
break;
}
+ case LinkerPatch::Type::kStringInternTable: {
+ uint32_t target_offset = GetInternTableEntryOffset(patch);
+ writer_->relative_patcher_->PatchPcRelativeReference(&patched_code_,
+ patch,
+ offset_ + literal_offset,
+ target_offset);
+ break;
+ }
case LinkerPatch::Type::kStringBssEntry: {
StringReference ref(patch.TargetStringDexFile(), patch.TargetStringIndex());
uint32_t target_offset =
@@ -1535,7 +1546,6 @@
}
mirror::String* GetTargetString(const LinkerPatch& patch) REQUIRES_SHARED(Locks::mutator_lock_) {
- ScopedObjectAccessUnchecked soa(Thread::Current());
ClassLinker* linker = Runtime::Current()->GetClassLinker();
mirror::String* string = linker->LookupString(*patch.TargetStringDexFile(),
patch.TargetStringIndex(),
@@ -1603,6 +1613,28 @@
data[2] = (address >> 16) & 0xffu;
data[3] = (address >> 24) & 0xffu;
}
+
+ // Calculate the offset of the InternTable slot (GcRoot<String>) when mmapped to the .bss.
+ uint32_t GetInternTableEntryOffset(const LinkerPatch& patch)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ DCHECK(!writer_->HasBootImage());
+ const uint8_t* string_root = writer_->LookupBootImageInternTableSlot(
+ *patch.TargetStringDexFile(), patch.TargetStringIndex());
+ DCHECK(string_root != nullptr);
+ uint32_t base_offset = writer_->bss_start_;
+ for (gc::space::ImageSpace* space : Runtime::Current()->GetHeap()->GetBootImageSpaces()) {
+ const uint8_t* const_tables_begin =
+ space->Begin() + space->GetImageHeader().GetBootImageConstantTablesOffset();
+ size_t offset = static_cast<size_t>(string_root - const_tables_begin);
+ if (offset < space->GetImageHeader().GetBootImageConstantTablesSize()) {
+ DCHECK_LE(base_offset + offset, writer_->bss_start_ + writer_->bss_methods_offset_);
+ return base_offset + offset;
+ }
+ base_offset += space->GetImageHeader().GetBootImageConstantTablesSize();
+ }
+ LOG(FATAL) << "Didn't find boot image string in boot image intern tables!";
+ UNREACHABLE();
+ }
};
class OatWriter::WriteMapMethodVisitor : public OatDexMethodVisitor {
@@ -1942,16 +1974,24 @@
DCHECK_EQ(bss_size_, 0u);
if (HasBootImage()) {
+ DCHECK(!map_boot_image_tables_to_bss_);
DCHECK(bss_string_entries_.empty());
}
- if (bss_method_entries_.empty() &&
+ if (!map_boot_image_tables_to_bss_ &&
+ bss_method_entries_.empty() &&
bss_type_entries_.empty() &&
bss_string_entries_.empty()) {
// Nothing to put to the .bss section.
return;
}
+ // Allocate space for boot image tables in the .bss section.
PointerSize pointer_size = GetInstructionSetPointerSize(instruction_set);
+ if (map_boot_image_tables_to_bss_) {
+ for (gc::space::ImageSpace* space : Runtime::Current()->GetHeap()->GetBootImageSpaces()) {
+ bss_size_ += space->GetImageHeader().GetBootImageConstantTablesSize();
+ }
+ }
bss_methods_offset_ = bss_size_;
@@ -3500,4 +3540,25 @@
return true;
}
+const uint8_t* OatWriter::LookupBootImageInternTableSlot(const DexFile& dex_file,
+ dex::StringIndex string_idx)
+ NO_THREAD_SAFETY_ANALYSIS {
+ // Single-threaded OatWriter can avoid locking.
+ uint32_t utf16_length;
+ const char* utf8_data = dex_file.StringDataAndUtf16LengthByIdx(string_idx, &utf16_length);
+ DCHECK_EQ(utf16_length, CountModifiedUtf8Chars(utf8_data));
+ InternTable::Utf8String string(utf16_length,
+ utf8_data,
+ ComputeUtf16HashFromModifiedUtf8(utf8_data, utf16_length));
+ const InternTable* intern_table = Runtime::Current()->GetClassLinker()->intern_table_;
+ for (const InternTable::Table::UnorderedSet& table : intern_table->strong_interns_.tables_) {
+ auto it = table.Find(string);
+ if (it != table.end()) {
+ return reinterpret_cast<const uint8_t*>(std::addressof(*it));
+ }
+ }
+ LOG(FATAL) << "Did not find boot image string " << utf8_data;
+ UNREACHABLE();
+}
+
} // namespace art
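The offset computation in GetInternTableEntryOffset above relies on the constant tables of each boot image space being mapped back to back at the start of the oat file's .bss (see the .bss sizing change in the same file). A self-contained sketch of the same arithmetic, with illustrative types that are not ART APIs:

    #include <cstddef>
    #include <cstdint>
    #include <optional>
    #include <vector>

    struct ConstTables {
      const uint8_t* begin;  // start of one space's read-only tables
      size_t size;           // size of those tables
    };

    // Returns the .bss-relative patch target for a table slot, or nullopt if
    // the pointer is not inside any of the given tables.
    std::optional<uint32_t> BssOffsetForSlot(const std::vector<ConstTables>& spaces,
                                             const uint8_t* slot,
                                             uint32_t bss_start) {
      uint32_t base = bss_start;
      for (const ConstTables& tables : spaces) {
        size_t offset = static_cast<size_t>(slot - tables.begin);
        if (offset < tables.size) {
          return base + static_cast<uint32_t>(offset);
        }
        base += tables.size;
      }
      return std::nullopt;
    }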
diff --git a/compiler/oat_writer.h b/compiler/oat_writer.h
index 8db00f7..780dee0 100644
--- a/compiler/oat_writer.h
+++ b/compiler/oat_writer.h
@@ -333,6 +333,10 @@
bool MayHaveCompiledMethods() const;
+ // Find the address of the GcRoot<String> in the InternTable for a boot image string.
+ const uint8_t* LookupBootImageInternTableSlot(const DexFile& dex_file,
+ dex::StringIndex string_idx);
+
enum class WriteState {
kAddingDexFileSources,
kPrepareLayout,
@@ -407,6 +411,10 @@
// is the target offset for patching, starting at `bss_start_ + bss_roots_offset_`.
SafeMap<StringReference, size_t, StringReferenceValueComparator> bss_string_entries_;
+ // Whether boot image tables should be mapped to the .bss. This is needed for compiled
+ // code that reads from these tables with PC-relative instructions.
+ bool map_boot_image_tables_to_bss_;
+
// Offset of the oat data from the start of the mmapped region of the elf file.
size_t oat_data_offset_;
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 3be774a..1b62868 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -435,11 +435,11 @@
// The string entry page address was preserved in temp_ thanks to kSaveEverything.
} else {
// For non-Baker read barrier, we need to re-calculate the address of the string entry page.
- adrp_label_ = arm64_codegen->NewPcRelativeStringPatch(dex_file, string_index);
+ adrp_label_ = arm64_codegen->NewStringBssEntryPatch(dex_file, string_index);
arm64_codegen->EmitAdrpPlaceholder(adrp_label_, temp_);
}
vixl::aarch64::Label* strp_label =
- arm64_codegen->NewPcRelativeStringPatch(dex_file, string_index, adrp_label_);
+ arm64_codegen->NewStringBssEntryPatch(dex_file, string_index, adrp_label_);
{
SingleEmissionCheckScope guard(arm64_codegen->GetVIXLAssembler());
__ Bind(strp_label);
@@ -1463,6 +1463,7 @@
pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+ string_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
baker_read_barrier_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
jit_string_patches_(StringReferenceValueComparator(),
graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
@@ -4675,6 +4676,13 @@
NewPcRelativePatch(dex_file, string_index.index_, adrp_label, &pc_relative_string_patches_);
}
+vixl::aarch64::Label* CodeGeneratorARM64::NewStringBssEntryPatch(
+ const DexFile& dex_file,
+ dex::StringIndex string_index,
+ vixl::aarch64::Label* adrp_label) {
+ return NewPcRelativePatch(dex_file, string_index.index_, adrp_label, &string_bss_entry_patches_);
+}
+
vixl::aarch64::Label* CodeGeneratorARM64::NewBakerReadBarrierPatch(uint32_t custom_data) {
baker_read_barrier_patches_.emplace_back(custom_data);
return &baker_read_barrier_patches_.back().label;
@@ -4764,6 +4772,7 @@
pc_relative_type_patches_.size() +
type_bss_entry_patches_.size() +
pc_relative_string_patches_.size() +
+ string_bss_entry_patches_.size() +
baker_read_barrier_patches_.size();
linker_patches->reserve(size);
if (GetCompilerOptions().IsBootImage()) {
@@ -4776,13 +4785,15 @@
} else {
DCHECK(pc_relative_method_patches_.empty());
DCHECK(pc_relative_type_patches_.empty());
- EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
- linker_patches);
+ EmitPcRelativeLinkerPatches<LinkerPatch::StringInternTablePatch>(pc_relative_string_patches_,
+ linker_patches);
}
EmitPcRelativeLinkerPatches<LinkerPatch::MethodBssEntryPatch>(method_bss_entry_patches_,
linker_patches);
EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
linker_patches);
+ EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_bss_entry_patches_,
+ linker_patches);
for (const BakerReadBarrierPatchInfo& info : baker_read_barrier_patches_) {
linker_patches->push_back(LinkerPatch::BakerReadBarrierBranchPatch(info.label.GetLocation(),
info.custom_data));
@@ -5043,6 +5054,7 @@
HLoadString::LoadKind desired_string_load_kind) {
switch (desired_string_load_kind) {
case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
+ case HLoadString::LoadKind::kBootImageInternTable:
case HLoadString::LoadKind::kBssEntry:
DCHECK(!Runtime::Current()->UseJitCompilation());
break;
@@ -5090,24 +5102,37 @@
switch (load->GetLoadKind()) {
case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
+ DCHECK(codegen_->GetCompilerOptions().IsBootImage());
// Add ADRP with its PC-relative String patch.
const DexFile& dex_file = load->GetDexFile();
const dex::StringIndex string_index = load->GetStringIndex();
- DCHECK(codegen_->GetCompilerOptions().IsBootImage());
vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
// Add ADD with its PC-relative String patch.
vixl::aarch64::Label* add_label =
codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
- return; // No dex cache slow path.
+ return;
}
case HLoadString::LoadKind::kBootImageAddress: {
uint32_t address = dchecked_integral_cast<uint32_t>(
reinterpret_cast<uintptr_t>(load->GetString().Get()));
DCHECK_NE(address, 0u);
__ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
- return; // No dex cache slow path.
+ return;
+ }
+ case HLoadString::LoadKind::kBootImageInternTable: {
+ DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
+ // Add ADRP with its PC-relative String patch.
+ const DexFile& dex_file = load->GetDexFile();
+ const dex::StringIndex string_index = load->GetStringIndex();
+ vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
+ codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
+ // Add LDR with its PC-relative String patch.
+ vixl::aarch64::Label* ldr_label =
+ codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
+ codegen_->EmitLdrOffsetPlaceholder(ldr_label, out.W(), out.X());
+ return;
}
case HLoadString::LoadKind::kBssEntry: {
// Add ADRP with its PC-relative String .bss entry patch.
@@ -5115,11 +5140,11 @@
const dex::StringIndex string_index = load->GetStringIndex();
DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Register temp = XRegisterFrom(load->GetLocations()->GetTemp(0));
- vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
+ vixl::aarch64::Label* adrp_label = codegen_->NewStringBssEntryPatch(dex_file, string_index);
codegen_->EmitAdrpPlaceholder(adrp_label, temp);
- // Add LDR with its PC-relative String patch.
+ // Add LDR with its .bss entry String patch.
vixl::aarch64::Label* ldr_label =
- codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
+ codegen_->NewStringBssEntryPatch(dex_file, string_index, adrp_label);
// /* GcRoot<mirror::String> */ out = *(base_address + offset) /* PC-relative */
GenerateGcRootFieldLoad(load,
out_loc,
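For kBootImageInternTable the ARM64 backend emits ADRP plus LDR rather than ADRP plus ADD: the first instruction materializes the 4 KiB page of the intern-table slot PC-relatively, and the load then applies the target's low 12 bits as its offset (scaled by the access size in the encoding). A rough, self-contained sketch of that address split, for illustration only:

    #include <cstdint>

    struct PageAndOffset {
      uint64_t page_base;  // what the ADRP materializes
      uint32_t low_bits;   // byte offset applied by the LDR
    };

    PageAndOffset SplitTarget(uint64_t target_address) {
      return { target_address & ~UINT64_C(0xfff),
               static_cast<uint32_t>(target_address & 0xfffu) };
    }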
diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h
index c339209..69c5119 100644
--- a/compiler/optimizing/code_generator_arm64.h
+++ b/compiler/optimizing/code_generator_arm64.h
@@ -599,6 +599,14 @@
dex::StringIndex string_index,
vixl::aarch64::Label* adrp_label = nullptr);
+ // Add a new .bss entry string patch for an instruction and return the label
+ // to be bound before the instruction. The instruction will be either the
+ // ADRP (pass `adrp_label = null`) or the LDR (pass `adrp_label` pointing
+ // to the associated ADRP patch label).
+ vixl::aarch64::Label* NewStringBssEntryPatch(const DexFile& dex_file,
+ dex::StringIndex string_index,
+ vixl::aarch64::Label* adrp_label = nullptr);
+
// Add a new baker read barrier patch and return the label to be bound
// before the CBNZ instruction.
vixl::aarch64::Label* NewBakerReadBarrierPatch(uint32_t custom_data);
@@ -825,8 +833,10 @@
ArenaDeque<PcRelativePatchInfo> pc_relative_type_patches_;
// PC-relative type patch info for kBssEntry.
ArenaDeque<PcRelativePatchInfo> type_bss_entry_patches_;
- // PC-relative String patch info; type depends on configuration (app .bss or boot image PIC).
+ // PC-relative String patch info; type depends on configuration (intern table or boot image PIC).
ArenaDeque<PcRelativePatchInfo> pc_relative_string_patches_;
+ // PC-relative String patch info for kBssEntry.
+ ArenaDeque<PcRelativePatchInfo> string_bss_entry_patches_;
// Baker read barrier patch info.
ArenaDeque<BakerReadBarrierPatchInfo> baker_read_barrier_patches_;
diff --git a/compiler/optimizing/code_generator_arm_vixl.cc b/compiler/optimizing/code_generator_arm_vixl.cc
index d78756e..8288141 100644
--- a/compiler/optimizing/code_generator_arm_vixl.cc
+++ b/compiler/optimizing/code_generator_arm_vixl.cc
@@ -598,7 +598,7 @@
down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler());
vixl32::Register temp = temps.Acquire();
CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
- arm_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index);
+ arm_codegen->NewStringBssEntryPatch(load->GetDexFile(), string_index);
arm_codegen->EmitMovwMovtPlaceholder(labels, temp);
__ Str(r0, MemOperand(temp));
}
@@ -2380,6 +2380,7 @@
pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+ string_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
baker_read_barrier_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
jit_string_patches_(StringReferenceValueComparator(),
graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
@@ -7315,6 +7316,7 @@
HLoadString::LoadKind desired_string_load_kind) {
switch (desired_string_load_kind) {
case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
+ case HLoadString::LoadKind::kBootImageInternTable:
case HLoadString::LoadKind::kBssEntry:
DCHECK(!Runtime::Current()->UseJitCompilation());
break;
@@ -7372,14 +7374,22 @@
CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
codegen_->EmitMovwMovtPlaceholder(labels, out);
- return; // No dex cache slow path.
+ return;
}
case HLoadString::LoadKind::kBootImageAddress: {
uint32_t address = dchecked_integral_cast<uint32_t>(
reinterpret_cast<uintptr_t>(load->GetString().Get()));
DCHECK_NE(address, 0u);
__ Ldr(out, codegen_->DeduplicateBootImageAddressLiteral(address));
- return; // No dex cache slow path.
+ return;
+ }
+ case HLoadString::LoadKind::kBootImageInternTable: {
+ DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
+ CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
+ codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
+ codegen_->EmitMovwMovtPlaceholder(labels, out);
+ __ Ldr(out, MemOperand(out, /* offset */ 0));
+ return;
}
case HLoadString::LoadKind::kBssEntry: {
DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
@@ -7387,7 +7397,7 @@
? RegisterFrom(locations->GetTemp(0))
: out;
CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
- codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
+ codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex());
codegen_->EmitMovwMovtPlaceholder(labels, temp);
GenerateGcRootFieldLoad(load, out_loc, temp, /* offset */ 0, kCompilerReadBarrierOption);
LoadStringSlowPathARMVIXL* slow_path =
@@ -9119,6 +9129,11 @@
return NewPcRelativePatch(dex_file, string_index.index_, &pc_relative_string_patches_);
}
+CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewStringBssEntryPatch(
+ const DexFile& dex_file, dex::StringIndex string_index) {
+ return NewPcRelativePatch(dex_file, string_index.index_, &string_bss_entry_patches_);
+}
+
CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewPcRelativePatch(
const DexFile& dex_file, uint32_t offset_or_index, ArenaDeque<PcRelativePatchInfo>* patches) {
patches->emplace_back(dex_file, offset_or_index);
@@ -9187,6 +9202,7 @@
/* MOVW+MOVT for each entry */ 2u * pc_relative_type_patches_.size() +
/* MOVW+MOVT for each entry */ 2u * type_bss_entry_patches_.size() +
/* MOVW+MOVT for each entry */ 2u * pc_relative_string_patches_.size() +
+ /* MOVW+MOVT for each entry */ 2u * string_bss_entry_patches_.size() +
baker_read_barrier_patches_.size();
linker_patches->reserve(size);
if (GetCompilerOptions().IsBootImage()) {
@@ -9199,13 +9215,15 @@
} else {
DCHECK(pc_relative_method_patches_.empty());
DCHECK(pc_relative_type_patches_.empty());
- EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
- linker_patches);
+ EmitPcRelativeLinkerPatches<LinkerPatch::StringInternTablePatch>(pc_relative_string_patches_,
+ linker_patches);
}
EmitPcRelativeLinkerPatches<LinkerPatch::MethodBssEntryPatch>(method_bss_entry_patches_,
linker_patches);
EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
linker_patches);
+ EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_bss_entry_patches_,
+ linker_patches);
for (const BakerReadBarrierPatchInfo& info : baker_read_barrier_patches_) {
linker_patches->push_back(LinkerPatch::BakerReadBarrierBranchPatch(info.label.GetLocation(),
info.custom_data));
diff --git a/compiler/optimizing/code_generator_arm_vixl.h b/compiler/optimizing/code_generator_arm_vixl.h
index 5feb33b..e78bc15 100644
--- a/compiler/optimizing/code_generator_arm_vixl.h
+++ b/compiler/optimizing/code_generator_arm_vixl.h
@@ -579,6 +579,8 @@
PcRelativePatchInfo* NewTypeBssEntryPatch(const DexFile& dex_file, dex::TypeIndex type_index);
PcRelativePatchInfo* NewPcRelativeStringPatch(const DexFile& dex_file,
dex::StringIndex string_index);
+ PcRelativePatchInfo* NewStringBssEntryPatch(const DexFile& dex_file,
+ dex::StringIndex string_index);
// Add a new baker read barrier patch and return the label to be bound
// before the BNE instruction.
@@ -803,8 +805,10 @@
ArenaDeque<PcRelativePatchInfo> pc_relative_type_patches_;
// PC-relative type patch info for kBssEntry.
ArenaDeque<PcRelativePatchInfo> type_bss_entry_patches_;
- // PC-relative String patch info; type depends on configuration (app .bss or boot image PIC).
+ // PC-relative String patch info; type depends on configuration (intern table or boot image PIC).
ArenaDeque<PcRelativePatchInfo> pc_relative_string_patches_;
+ // PC-relative String patch info for kBssEntry.
+ ArenaDeque<PcRelativePatchInfo> string_bss_entry_patches_;
// Baker read barrier patch info.
ArenaDeque<BakerReadBarrierPatchInfo> baker_read_barrier_patches_;
diff --git a/compiler/optimizing/code_generator_mips.cc b/compiler/optimizing/code_generator_mips.cc
index 51f5b96..ac8f675 100644
--- a/compiler/optimizing/code_generator_mips.cc
+++ b/compiler/optimizing/code_generator_mips.cc
@@ -360,7 +360,7 @@
// The string entry address was preserved in `entry_address` thanks to kSaveEverything.
DCHECK(bss_info_high_);
CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
- mips_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index, bss_info_high_);
+ mips_codegen->NewStringBssEntryPatch(load->GetDexFile(), string_index, bss_info_high_);
__ Sw(calling_convention.GetRegisterAt(0),
entry_address,
/* placeholder */ 0x5678,
@@ -380,9 +380,9 @@
const bool isR6 = mips_codegen->GetInstructionSetFeatures().IsR6();
Register base = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
- mips_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index);
+ mips_codegen->NewStringBssEntryPatch(load->GetDexFile(), string_index);
CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
- mips_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index, info_high);
+ mips_codegen->NewStringBssEntryPatch(load->GetDexFile(), string_index, info_high);
mips_codegen->EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, base);
__ Sw(out, TMP, /* placeholder */ 0x5678, &info_low->label);
}
@@ -1101,6 +1101,7 @@
pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+ string_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
jit_class_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
clobbered_ra_(false) {
@@ -1651,7 +1652,8 @@
method_bss_entry_patches_.size() +
pc_relative_type_patches_.size() +
type_bss_entry_patches_.size() +
- pc_relative_string_patches_.size();
+ pc_relative_string_patches_.size() +
+ string_bss_entry_patches_.size();
linker_patches->reserve(size);
if (GetCompilerOptions().IsBootImage()) {
EmitPcRelativeLinkerPatches<LinkerPatch::RelativeMethodPatch>(pc_relative_method_patches_,
@@ -1663,13 +1665,15 @@
} else {
DCHECK(pc_relative_method_patches_.empty());
DCHECK(pc_relative_type_patches_.empty());
- EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
- linker_patches);
+ EmitPcRelativeLinkerPatches<LinkerPatch::StringInternTablePatch>(pc_relative_string_patches_,
+ linker_patches);
}
EmitPcRelativeLinkerPatches<LinkerPatch::MethodBssEntryPatch>(method_bss_entry_patches_,
linker_patches);
EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
linker_patches);
+ EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_bss_entry_patches_,
+ linker_patches);
DCHECK_EQ(size, linker_patches->size());
}
@@ -1712,6 +1716,13 @@
return NewPcRelativePatch(dex_file, string_index.index_, info_high, &pc_relative_string_patches_);
}
+CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewStringBssEntryPatch(
+ const DexFile& dex_file,
+ dex::StringIndex string_index,
+ const PcRelativePatchInfo* info_high) {
+ return NewPcRelativePatch(dex_file, string_index.index_, info_high, &string_bss_entry_patches_);
+}
+
CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativePatch(
const DexFile& dex_file,
uint32_t offset_or_index,
@@ -7365,6 +7376,7 @@
bool fallback_load = has_irreducible_loops && !is_r6;
switch (desired_string_load_kind) {
case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
+ case HLoadString::LoadKind::kBootImageInternTable:
case HLoadString::LoadKind::kBssEntry:
DCHECK(!Runtime::Current()->UseJitCompilation());
break;
@@ -7817,6 +7829,7 @@
// We need an extra register for PC-relative literals on R2.
case HLoadString::LoadKind::kBootImageAddress:
case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
+ case HLoadString::LoadKind::kBootImageInternTable:
case HLoadString::LoadKind::kBssEntry:
if (isR6) {
break;
@@ -7863,6 +7876,7 @@
// We need an extra register for PC-relative literals on R2.
case HLoadString::LoadKind::kBootImageAddress:
case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
+ case HLoadString::LoadKind::kBootImageInternTable:
case HLoadString::LoadKind::kBssEntry:
base_or_current_method_reg = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
break;
@@ -7882,7 +7896,7 @@
out,
base_or_current_method_reg);
__ Addiu(out, out, /* placeholder */ 0x5678, &info_low->label);
- return; // No dex cache slow path.
+ return;
}
case HLoadString::LoadKind::kBootImageAddress: {
uint32_t address = dchecked_integral_cast<uint32_t>(
@@ -7891,14 +7905,26 @@
__ LoadLiteral(out,
base_or_current_method_reg,
codegen_->DeduplicateBootImageAddressLiteral(address));
- return; // No dex cache slow path.
+ return;
}
- case HLoadString::LoadKind::kBssEntry: {
+ case HLoadString::LoadKind::kBootImageInternTable: {
DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
+ codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high,
+ out,
+ base_or_current_method_reg);
+ __ Lw(out, out, /* placeholder */ 0x5678, &info_low->label);
+ return;
+ }
+ case HLoadString::LoadKind::kBssEntry: {
+ DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
+ CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
+ codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex());
+ CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
+ codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
Register temp = non_baker_read_barrier ? out : locations->GetTemp(0).AsRegister<Register>();
codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high,
diff --git a/compiler/optimizing/code_generator_mips.h b/compiler/optimizing/code_generator_mips.h
index c0e1ec0..f15f8c6 100644
--- a/compiler/optimizing/code_generator_mips.h
+++ b/compiler/optimizing/code_generator_mips.h
@@ -633,6 +633,9 @@
PcRelativePatchInfo* NewPcRelativeStringPatch(const DexFile& dex_file,
dex::StringIndex string_index,
const PcRelativePatchInfo* info_high = nullptr);
+ PcRelativePatchInfo* NewStringBssEntryPatch(const DexFile& dex_file,
+ dex::StringIndex string_index,
+ const PcRelativePatchInfo* info_high = nullptr);
Literal* DeduplicateBootImageAddressLiteral(uint32_t address);
void EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info_high,
@@ -699,8 +702,10 @@
ArenaDeque<PcRelativePatchInfo> pc_relative_type_patches_;
// PC-relative type patch info for kBssEntry.
ArenaDeque<PcRelativePatchInfo> type_bss_entry_patches_;
- // PC-relative String patch info; type depends on configuration (app .bss or boot image PIC).
+ // PC-relative String patch info; type depends on configuration (intern table or boot image PIC).
ArenaDeque<PcRelativePatchInfo> pc_relative_string_patches_;
+ // PC-relative String patch info for kBssEntry.
+ ArenaDeque<PcRelativePatchInfo> string_bss_entry_patches_;
// Patches for string root accesses in JIT compiled code.
ArenaDeque<JitPatchInfo> jit_string_patches_;
diff --git a/compiler/optimizing/code_generator_mips64.cc b/compiler/optimizing/code_generator_mips64.cc
index 25fb1d0..71c2bff 100644
--- a/compiler/optimizing/code_generator_mips64.cc
+++ b/compiler/optimizing/code_generator_mips64.cc
@@ -318,9 +318,9 @@
// The string entry address was preserved in `entry_address` thanks to kSaveEverything.
DCHECK(bss_info_high_);
CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
- mips64_codegen->NewPcRelativeStringPatch(load->GetDexFile(),
- string_index,
- bss_info_high_);
+ mips64_codegen->NewStringBssEntryPatch(load->GetDexFile(),
+ string_index,
+ bss_info_high_);
__ Bind(&info_low->label);
__ StoreToOffset(kStoreWord,
calling_convention.GetRegisterAt(0),
@@ -339,9 +339,9 @@
// For non-Baker read barriers we need to re-calculate the address of
// the string entry.
CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
- mips64_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index);
+ mips64_codegen->NewStringBssEntryPatch(load->GetDexFile(), string_index);
CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
- mips64_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index, info_high);
+ mips64_codegen->NewStringBssEntryPatch(load->GetDexFile(), string_index, info_high);
mips64_codegen->EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, info_low);
__ StoreToOffset(kStoreWord, out, TMP, /* placeholder */ 0x5678);
}
@@ -1049,6 +1049,7 @@
pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+ string_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
jit_string_patches_(StringReferenceValueComparator(),
graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
jit_class_patches_(TypeReferenceValueComparator(),
@@ -1560,7 +1561,8 @@
method_bss_entry_patches_.size() +
pc_relative_type_patches_.size() +
type_bss_entry_patches_.size() +
- pc_relative_string_patches_.size();
+ pc_relative_string_patches_.size() +
+ string_bss_entry_patches_.size();
linker_patches->reserve(size);
if (GetCompilerOptions().IsBootImage()) {
EmitPcRelativeLinkerPatches<LinkerPatch::RelativeMethodPatch>(pc_relative_method_patches_,
@@ -1572,13 +1574,15 @@
} else {
DCHECK(pc_relative_method_patches_.empty());
DCHECK(pc_relative_type_patches_.empty());
- EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
- linker_patches);
+ EmitPcRelativeLinkerPatches<LinkerPatch::StringInternTablePatch>(pc_relative_string_patches_,
+ linker_patches);
}
EmitPcRelativeLinkerPatches<LinkerPatch::MethodBssEntryPatch>(method_bss_entry_patches_,
linker_patches);
EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
linker_patches);
+ EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_bss_entry_patches_,
+ linker_patches);
DCHECK_EQ(size, linker_patches->size());
}
@@ -1621,6 +1625,13 @@
return NewPcRelativePatch(dex_file, string_index.index_, info_high, &pc_relative_string_patches_);
}
+CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewStringBssEntryPatch(
+ const DexFile& dex_file,
+ dex::StringIndex string_index,
+ const PcRelativePatchInfo* info_high) {
+ return NewPcRelativePatch(dex_file, string_index.index_, info_high, &string_bss_entry_patches_);
+}
+
CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativePatch(
const DexFile& dex_file,
uint32_t offset_or_index,
@@ -5729,6 +5740,7 @@
bool fallback_load = false;
switch (desired_string_load_kind) {
case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
+ case HLoadString::LoadKind::kBootImageInternTable:
case HLoadString::LoadKind::kBssEntry:
DCHECK(!Runtime::Current()->UseJitCompilation());
break;
@@ -6117,7 +6129,7 @@
codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
__ Daddiu(out, AT, /* placeholder */ 0x5678);
- return; // No dex cache slow path.
+ return;
}
case HLoadString::LoadKind::kBootImageAddress: {
uint32_t address = dchecked_integral_cast<uint32_t>(
@@ -6126,14 +6138,24 @@
__ LoadLiteral(out,
kLoadUnsignedWord,
codegen_->DeduplicateBootImageAddressLiteral(address));
- return; // No dex cache slow path.
+ return;
}
- case HLoadString::LoadKind::kBssEntry: {
+ case HLoadString::LoadKind::kBootImageInternTable: {
DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
+ codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
+ __ Lwu(out, AT, /* placeholder */ 0x5678);
+ return;
+ }
+ case HLoadString::LoadKind::kBssEntry: {
+ DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
+ CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
+ codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex());
+ CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
+ codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
GpuRegister temp = non_baker_read_barrier
? out
diff --git a/compiler/optimizing/code_generator_mips64.h b/compiler/optimizing/code_generator_mips64.h
index 72d4743..3035621 100644
--- a/compiler/optimizing/code_generator_mips64.h
+++ b/compiler/optimizing/code_generator_mips64.h
@@ -605,6 +605,9 @@
PcRelativePatchInfo* NewPcRelativeStringPatch(const DexFile& dex_file,
dex::StringIndex string_index,
const PcRelativePatchInfo* info_high = nullptr);
+ PcRelativePatchInfo* NewStringBssEntryPatch(const DexFile& dex_file,
+ dex::StringIndex string_index,
+ const PcRelativePatchInfo* info_high = nullptr);
Literal* DeduplicateBootImageAddressLiteral(uint64_t address);
void EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info_high,
@@ -666,8 +669,10 @@
ArenaDeque<PcRelativePatchInfo> pc_relative_type_patches_;
// PC-relative type patch info for kBssEntry.
ArenaDeque<PcRelativePatchInfo> type_bss_entry_patches_;
- // PC-relative String patch info; type depends on configuration (app .bss or boot image PIC).
+ // PC-relative String patch info; type depends on configuration (intern table or boot image PIC).
ArenaDeque<PcRelativePatchInfo> pc_relative_string_patches_;
+ // PC-relative String patch info for kBssEntry.
+ ArenaDeque<PcRelativePatchInfo> string_bss_entry_patches_;
// Patches for string root accesses in JIT compiled code.
StringToLiteralMap jit_string_patches_;
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 99b7793..512968f 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -1035,6 +1035,7 @@
boot_image_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+ string_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
jit_class_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
constant_area_start_(-1),
@@ -4652,7 +4653,6 @@
}
void CodeGeneratorX86::RecordBootStringPatch(HLoadString* load_string) {
- DCHECK(GetCompilerOptions().IsBootImage());
HX86ComputeBaseMethodAddress* address = load_string->InputAt(0)->AsX86ComputeBaseMethodAddress();
string_patches_.emplace_back(address,
load_string->GetDexFile(),
@@ -4664,9 +4664,9 @@
DCHECK(!GetCompilerOptions().IsBootImage());
HX86ComputeBaseMethodAddress* address =
load_string->InputAt(0)->AsX86ComputeBaseMethodAddress();
- string_patches_.emplace_back(
+ string_bss_entry_patches_.emplace_back(
address, load_string->GetDexFile(), load_string->GetStringIndex().index_);
- return &string_patches_.back().label;
+ return &string_bss_entry_patches_.back().label;
}
// The label points to the end of the "movl" or another instruction but the literal offset
@@ -4691,7 +4691,8 @@
method_bss_entry_patches_.size() +
boot_image_type_patches_.size() +
type_bss_entry_patches_.size() +
- string_patches_.size();
+ string_patches_.size() +
+ string_bss_entry_patches_.size();
linker_patches->reserve(size);
if (GetCompilerOptions().IsBootImage()) {
EmitPcRelativeLinkerPatches<LinkerPatch::RelativeMethodPatch>(boot_image_method_patches_,
@@ -4702,12 +4703,15 @@
} else {
DCHECK(boot_image_method_patches_.empty());
DCHECK(boot_image_type_patches_.empty());
- EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<LinkerPatch::StringInternTablePatch>(string_patches_,
+ linker_patches);
}
EmitPcRelativeLinkerPatches<LinkerPatch::MethodBssEntryPatch>(method_bss_entry_patches_,
linker_patches);
EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
linker_patches);
+ EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_bss_entry_patches_,
+ linker_patches);
DCHECK_EQ(size, linker_patches->size());
}
@@ -6219,6 +6223,7 @@
HLoadString::LoadKind desired_string_load_kind) {
switch (desired_string_load_kind) {
case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
+ case HLoadString::LoadKind::kBootImageInternTable:
case HLoadString::LoadKind::kBssEntry:
DCHECK(!Runtime::Current()->UseJitCompilation());
break;
@@ -6237,6 +6242,7 @@
LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
HLoadString::LoadKind load_kind = load->GetLoadKind();
if (load_kind == HLoadString::LoadKind::kBootImageLinkTimePcRelative ||
+ load_kind == HLoadString::LoadKind::kBootImageInternTable ||
load_kind == HLoadString::LoadKind::kBssEntry) {
locations->SetInAt(0, Location::RequiresRegister());
}
@@ -6282,14 +6288,21 @@
Register method_address = locations->InAt(0).AsRegister<Register>();
__ leal(out, Address(method_address, CodeGeneratorX86::kDummy32BitOffset));
codegen_->RecordBootStringPatch(load);
- return; // No dex cache slow path.
+ return;
}
case HLoadString::LoadKind::kBootImageAddress: {
uint32_t address = dchecked_integral_cast<uint32_t>(
reinterpret_cast<uintptr_t>(load->GetString().Get()));
DCHECK_NE(address, 0u);
__ movl(out, Immediate(address));
- return; // No dex cache slow path.
+ return;
+ }
+ case HLoadString::LoadKind::kBootImageInternTable: {
+ DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
+ Register method_address = locations->InAt(0).AsRegister<Register>();
+ __ movl(out, Address(method_address, CodeGeneratorX86::kDummy32BitOffset));
+ codegen_->RecordBootStringPatch(load);
+ return;
}
case HLoadString::LoadKind::kBssEntry: {
Register method_address = locations->InAt(0).AsRegister<Register>();
diff --git a/compiler/optimizing/code_generator_x86.h b/compiler/optimizing/code_generator_x86.h
index f48753b..b32d57a 100644
--- a/compiler/optimizing/code_generator_x86.h
+++ b/compiler/optimizing/code_generator_x86.h
@@ -640,8 +640,10 @@
ArenaDeque<X86PcRelativePatchInfo> boot_image_type_patches_;
// Type patch locations for kBssEntry.
ArenaDeque<X86PcRelativePatchInfo> type_bss_entry_patches_;
- // String patch locations; type depends on configuration (app .bss or boot image).
+ // String patch locations; type depends on configuration (intern table or boot image PIC).
ArenaDeque<X86PcRelativePatchInfo> string_patches_;
+ // String patch locations for kBssEntry.
+ ArenaDeque<X86PcRelativePatchInfo> string_bss_entry_patches_;
// Patches for string root accesses in JIT compiled code.
ArenaDeque<PatchInfo<Label>> jit_string_patches_;
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index 8283887..0c3b2ad 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -1089,15 +1089,15 @@
}
void CodeGeneratorX86_64::RecordBootStringPatch(HLoadString* load_string) {
- DCHECK(GetCompilerOptions().IsBootImage());
string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex().index_);
__ Bind(&string_patches_.back().label);
}
Label* CodeGeneratorX86_64::NewStringBssEntryPatch(HLoadString* load_string) {
DCHECK(!GetCompilerOptions().IsBootImage());
- string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex().index_);
- return &string_patches_.back().label;
+ string_bss_entry_patches_.emplace_back(
+ load_string->GetDexFile(), load_string->GetStringIndex().index_);
+ return &string_bss_entry_patches_.back().label;
}
// The label points to the end of the "movl" or another instruction but the literal offset
@@ -1122,7 +1122,8 @@
method_bss_entry_patches_.size() +
boot_image_type_patches_.size() +
type_bss_entry_patches_.size() +
- string_patches_.size();
+ string_patches_.size() +
+ string_bss_entry_patches_.size();
linker_patches->reserve(size);
if (GetCompilerOptions().IsBootImage()) {
EmitPcRelativeLinkerPatches<LinkerPatch::RelativeMethodPatch>(boot_image_method_patches_,
@@ -1133,12 +1134,15 @@
} else {
DCHECK(boot_image_method_patches_.empty());
DCHECK(boot_image_type_patches_.empty());
- EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<LinkerPatch::StringInternTablePatch>(string_patches_,
+ linker_patches);
}
EmitPcRelativeLinkerPatches<LinkerPatch::MethodBssEntryPatch>(method_bss_entry_patches_,
linker_patches);
EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
linker_patches);
+ EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_bss_entry_patches_,
+ linker_patches);
DCHECK_EQ(size, linker_patches->size());
}
@@ -1230,6 +1234,7 @@
boot_image_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+ string_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
jit_class_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
@@ -5621,6 +5626,7 @@
HLoadString::LoadKind desired_string_load_kind) {
switch (desired_string_load_kind) {
case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
+ case HLoadString::LoadKind::kBootImageInternTable:
case HLoadString::LoadKind::kBssEntry:
DCHECK(!Runtime::Current()->UseJitCompilation());
break;
@@ -5678,14 +5684,20 @@
DCHECK(codegen_->GetCompilerOptions().IsBootImage());
__ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
codegen_->RecordBootStringPatch(load);
- return; // No dex cache slow path.
+ return;
}
case HLoadString::LoadKind::kBootImageAddress: {
uint32_t address = dchecked_integral_cast<uint32_t>(
reinterpret_cast<uintptr_t>(load->GetString().Get()));
DCHECK_NE(address, 0u);
__ movl(out, Immediate(static_cast<int32_t>(address))); // Zero-extended.
- return; // No dex cache slow path.
+ return;
+ }
+ case HLoadString::LoadKind::kBootImageInternTable: {
+ DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
+ __ movl(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
+ codegen_->RecordBootStringPatch(load);
+ return;
}
case HLoadString::LoadKind::kBssEntry: {
Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h
index 33c6429..f5fa86b 100644
--- a/compiler/optimizing/code_generator_x86_64.h
+++ b/compiler/optimizing/code_generator_x86_64.h
@@ -611,8 +611,10 @@
ArenaDeque<PatchInfo<Label>> boot_image_type_patches_;
// Type patch locations for kBssEntry.
ArenaDeque<PatchInfo<Label>> type_bss_entry_patches_;
- // String patch locations; type depends on configuration (app .bss or boot image).
+ // String patch locations; type depends on configuration (intern table or boot image PIC).
ArenaDeque<PatchInfo<Label>> string_patches_;
+ // String patch locations for kBssEntry.
+ ArenaDeque<PatchInfo<Label>> string_bss_entry_patches_;
// Patches for string literals in JIT compiled code.
ArenaDeque<PatchInfo<Label>> jit_string_patches_;
diff --git a/compiler/optimizing/nodes.cc b/compiler/optimizing/nodes.cc
index 1510eaf..ebbea27 100644
--- a/compiler/optimizing/nodes.cc
+++ b/compiler/optimizing/nodes.cc
@@ -2791,6 +2791,7 @@
}
switch (GetLoadKind()) {
case LoadKind::kBootImageAddress:
+ case LoadKind::kBootImageInternTable:
case LoadKind::kJitTableAddress: {
ScopedObjectAccess soa(Thread::Current());
return GetString().Get() == other_load_string->GetString().Get();
@@ -2821,6 +2822,8 @@
return os << "BootImageLinkTimePcRelative";
case HLoadString::LoadKind::kBootImageAddress:
return os << "BootImageAddress";
+ case HLoadString::LoadKind::kBootImageInternTable:
+ return os << "BootImageInternTable";
case HLoadString::LoadKind::kBssEntry:
return os << "BssEntry";
case HLoadString::LoadKind::kJitTableAddress:
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 869fdd4..93677f6 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -5872,6 +5872,10 @@
// Used for boot image strings referenced by apps in AOT- and JIT-compiled code.
kBootImageAddress,
+ // Use a PC-relative load from a boot image InternTable mmapped into the .bss
+ // of the oat file.
+ kBootImageInternTable,
+
// Load from an entry in the .bss section using a PC-relative load.
// Used for strings outside boot image when .bss is accessible with a PC-relative load.
kBssEntry,
@@ -5931,6 +5935,7 @@
LoadKind load_kind = GetLoadKind();
if (load_kind == LoadKind::kBootImageLinkTimePcRelative ||
load_kind == LoadKind::kBootImageAddress ||
+ load_kind == LoadKind::kBootImageInternTable ||
load_kind == LoadKind::kJitTableAddress) {
return false;
}
@@ -5991,8 +5996,9 @@
// The special input is used for PC-relative loads on some architectures,
// including literal pool loads, which are PC-relative too.
DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
- GetLoadKind() == LoadKind::kBssEntry ||
- GetLoadKind() == LoadKind::kBootImageAddress) << GetLoadKind();
+ GetLoadKind() == LoadKind::kBootImageAddress ||
+ GetLoadKind() == LoadKind::kBootImageInternTable ||
+ GetLoadKind() == LoadKind::kBssEntry) << GetLoadKind();
// HLoadString::GetInputRecords() returns an empty array at this point,
// so use the GetInputRecords() from the base class to set the input record.
DCHECK(special_input_.GetInstruction() == nullptr);
diff --git a/compiler/optimizing/pc_relative_fixups_mips.cc b/compiler/optimizing/pc_relative_fixups_mips.cc
index 21b6452..4cb99f9 100644
--- a/compiler/optimizing/pc_relative_fixups_mips.cc
+++ b/compiler/optimizing/pc_relative_fixups_mips.cc
@@ -88,8 +88,9 @@
void VisitLoadString(HLoadString* load_string) OVERRIDE {
HLoadString::LoadKind load_kind = load_string->GetLoadKind();
switch (load_kind) {
- case HLoadString::LoadKind::kBootImageAddress:
case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
+ case HLoadString::LoadKind::kBootImageAddress:
+ case HLoadString::LoadKind::kBootImageInternTable:
case HLoadString::LoadKind::kBssEntry:
// Add a base register for PC-relative literals on R2.
InitializePCRelativeBasePointer();
diff --git a/compiler/optimizing/pc_relative_fixups_x86.cc b/compiler/optimizing/pc_relative_fixups_x86.cc
index 2743df9..c463ecd 100644
--- a/compiler/optimizing/pc_relative_fixups_x86.cc
+++ b/compiler/optimizing/pc_relative_fixups_x86.cc
@@ -92,6 +92,7 @@
void VisitLoadString(HLoadString* load_string) OVERRIDE {
HLoadString::LoadKind load_kind = load_string->GetLoadKind();
if (load_kind == HLoadString::LoadKind::kBootImageLinkTimePcRelative ||
+ load_kind == HLoadString::LoadKind::kBootImageInternTable ||
load_kind == HLoadString::LoadKind::kBssEntry) {
HX86ComputeBaseMethodAddress* method_address = GetPCRelativeBasePointer(load_string);
load_string->AddSpecialInput(method_address);
diff --git a/compiler/optimizing/sharpening.cc b/compiler/optimizing/sharpening.cc
index 9536d14..1ca63f4 100644
--- a/compiler/optimizing/sharpening.cc
+++ b/compiler/optimizing/sharpening.cc
@@ -278,10 +278,12 @@
} else {
// AOT app compilation. Try to lookup the string without allocating if not found.
string = class_linker->LookupString(dex_file, string_index, dex_cache.Get());
- if (string != nullptr &&
- runtime->GetHeap()->ObjectIsInBootImageSpace(string) &&
- !codegen_->GetCompilerOptions().GetCompilePic()) {
- desired_load_kind = HLoadString::LoadKind::kBootImageAddress;
+ if (string != nullptr && runtime->GetHeap()->ObjectIsInBootImageSpace(string)) {
+ if (codegen_->GetCompilerOptions().GetCompilePic()) {
+ desired_load_kind = HLoadString::LoadKind::kBootImageInternTable;
+ } else {
+ desired_load_kind = HLoadString::LoadKind::kBootImageAddress;
+ }
} else {
desired_load_kind = HLoadString::LoadKind::kBssEntry;
}
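A minimal sketch of the decision above, with an illustrative enum rather than the ART types: for AOT app compilation, a string already interned in the boot image is loaded by direct address when not compiling PIC, or through the mmapped boot image intern table when compiling PIC; anything else falls back to the app's own .bss entry.

    enum class LoadKind { kBootImageAddress, kBootImageInternTable, kBssEntry };

    LoadKind ChooseAppAotStringLoadKind(bool string_in_boot_image, bool compile_pic) {
      if (string_in_boot_image) {
        return compile_pic ? LoadKind::kBootImageInternTable : LoadKind::kBootImageAddress;
      }
      return LoadKind::kBssEntry;
    }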
diff --git a/dex2oat/dex2oat_test.cc b/dex2oat/dex2oat_test.cc
index e10b171..5bf3513 100644
--- a/dex2oat/dex2oat_test.cc
+++ b/dex2oat/dex2oat_test.cc
@@ -706,7 +706,7 @@
}
}
- uint64_t GetImageSize(const std::string& image_file_name) {
+ uint64_t GetImageObjectSectionSize(const std::string& image_file_name) {
EXPECT_FALSE(image_file_name.empty());
std::unique_ptr<File> file(OS::OpenFileForReading(image_file_name.c_str()));
CHECK(file != nullptr);
@@ -715,7 +715,7 @@
CHECK(success);
CHECK(image_header.IsValid());
ReaderMutexLock mu(Thread::Current(), *Locks::mutator_lock_);
- return image_header.GetImageSize();
+ return image_header.GetObjectsSection().Size();
}
void RunTest(bool app_image) {
@@ -734,7 +734,7 @@
CheckValidity();
ASSERT_TRUE(success_);
// Don't check the result since CheckResult relies on the class being in the profile.
- image_file_empty_profile = GetImageSize(app_image_file);
+ image_file_empty_profile = GetImageObjectSectionSize(app_image_file);
EXPECT_GT(image_file_empty_profile, 0u);
}
@@ -750,8 +750,8 @@
if (app_image) {
// Test that the profile made a difference by adding more classes.
- const uint64_t image_file_small_profile = GetImageSize(app_image_file);
- CHECK_LT(image_file_empty_profile, image_file_small_profile);
+ const uint64_t image_file_small_profile = GetImageObjectSectionSize(app_image_file);
+ ASSERT_LT(image_file_empty_profile, image_file_small_profile);
}
}
diff --git a/oatdump/oatdump.cc b/oatdump/oatdump.cc
index 6e4c68b..4161067 100644
--- a/oatdump/oatdump.cc
+++ b/oatdump/oatdump.cc
@@ -2006,7 +2006,7 @@
// Intern table is 8-byte aligned.
uint32_t end_caches = dex_cache_arrays_section.Offset() + dex_cache_arrays_section.Size();
- CHECK_EQ(RoundUp(end_caches, 8U), intern_section.Offset());
+ CHECK_ALIGNED(intern_section.Offset(), sizeof(uint64_t));
stats_.alignment_bytes += intern_section.Offset() - end_caches;
// Add space between intern table and class table.
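
The oatdump hunk above no longer insists that the intern table starts exactly at the 8-byte-rounded end of the dex cache arrays; with the boot image tables now placed at a page-aligned offset there can be a larger gap, so only 8-byte alignment is checked and the gap is attributed to alignment bytes. A small accounting sketch under assumed offsets (not oatdump code):

// Illustrative sketch: the gap between the end of the previous section and a
// page-aligned intern table is counted as alignment padding, not an error.
#include <cassert>
#include <cstdint>

uint32_t AlignmentGapBytes(uint32_t end_of_dex_cache_arrays, uint32_t intern_table_offset) {
  assert(intern_table_offset % sizeof(uint64_t) == 0);     // still 8-byte aligned
  assert(intern_table_offset >= end_of_dex_cache_arrays);  // table starts at or after the gap
  return intern_table_offset - end_of_dex_cache_arrays;    // added to alignment_bytes
}
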
diff --git a/patchoat/patchoat.cc b/patchoat/patchoat.cc
index f54e81f..edce5b4 100644
--- a/patchoat/patchoat.cc
+++ b/patchoat/patchoat.cc
@@ -469,6 +469,9 @@
void PatchOat::PatchInternedStrings(const ImageHeader* image_header) {
const auto& section = image_header->GetInternedStringsSection();
+ if (section.Size() == 0) {
+ return;
+ }
InternTable temp_table;
// Note that we require that ReadFromMemory does not make an internal copy of the elements.
// This also relies on visit roots not doing any verification which could fail after we update
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index efef975..4161754 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -1760,6 +1760,17 @@
header.VisitPackedArtMethods(&visitor, space->Begin(), image_pointer_size_);
}
+ if (!app_image) {
+ // Make the string intern table and class table immutable for boot image.
+ // PIC app oat files may mmap a read-only copy into their own .bss section,
+ // so enforce that the data in the boot image tables remains unchanged.
+ //
+ // We cannot do that for app image even after the fixup as some interned
+ // String references may actually end up pointing to moveable Strings.
+ uint8_t* const_section_begin = space->Begin() + header.GetBootImageConstantTablesOffset();
+ mprotect(const_section_begin, header.GetBootImageConstantTablesSize(), PROT_READ);
+ }
+
ClassTable* class_table = nullptr;
{
WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
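
The class_linker.cc hunk above relies on the boot image tables starting at a page-aligned offset and spanning a page-rounded size, so the whole range can be write-protected in place after the image is loaded. A minimal POSIX sketch of that protection step, with hypothetical parameter names and the same page-alignment assumption:

// Minimal sketch (plain POSIX, not ART code): write-protect the page-aligned
// table range inside an already-mapped boot image.
#include <sys/mman.h>
#include <cerrno>
#include <cstdint>
#include <cstdio>
#include <cstring>

bool MakeTablesReadOnly(uint8_t* image_begin, uint32_t tables_offset, uint32_t tables_size) {
  uint8_t* const tables_begin = image_begin + tables_offset;  // page-aligned by construction
  if (mprotect(tables_begin, tables_size, PROT_READ) != 0) {
    fprintf(stderr, "mprotect failed: %s\n", strerror(errno));
    return false;
  }
  return true;
}
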
diff --git a/runtime/class_linker.h b/runtime/class_linker.h
index 783ec74..8c2f0a5 100644
--- a/runtime/class_linker.h
+++ b/runtime/class_linker.h
@@ -1278,6 +1278,7 @@
friend class ImageWriter; // for GetClassRoots
friend class JniCompilerTest; // for GetRuntimeQuickGenericJniStub
friend class JniInternalTest; // for GetRuntimeQuickGenericJniStub
+ friend class OatWriter; // for boot image string table slot address lookup.
friend class VMClassLoader; // for LookupClass and FindClassInBaseDexClassLoader.
ART_FRIEND_TEST(ClassLinkerTest, RegisterDexFileName); // for DexLock, and RegisterDexFileLocked
ART_FRIEND_TEST(mirror::DexCacheMethodHandlesTest, Open); // for AllocDexCache
diff --git a/runtime/image.cc b/runtime/image.cc
index 8debc71..c8581c1 100644
--- a/runtime/image.cc
+++ b/runtime/image.cc
@@ -26,7 +26,7 @@
namespace art {
const uint8_t ImageHeader::kImageMagic[] = { 'a', 'r', 't', '\n' };
-const uint8_t ImageHeader::kImageVersion[] = { '0', '4', '7', '\0' }; // Smaller ArtMethod.
+const uint8_t ImageHeader::kImageVersion[] = { '0', '4', '8', '\0' }; // Map boot image tables.
ImageHeader::ImageHeader(uint32_t image_begin,
uint32_t image_size,
diff --git a/runtime/image.h b/runtime/image.h
index 42abffc..2b24087 100644
--- a/runtime/image.h
+++ b/runtime/image.h
@@ -19,6 +19,7 @@
#include <string.h>
+#include "base/bit_utils.h"
#include "base/enums.h"
#include "globals.h"
#include "mirror/object.h"
@@ -311,6 +312,22 @@
return boot_image_size_ != 0u;
}
+ uint32_t GetBootImageConstantTablesOffset() const {
+ // Interned strings table and class table for boot image are mmapped read only.
+ DCHECK(!IsAppImage());
+ const ImageSection& interned_strings = GetInternedStringsSection();
+ DCHECK_ALIGNED(interned_strings.Offset(), kPageSize);
+ return interned_strings.Offset();
+ }
+
+ uint32_t GetBootImageConstantTablesSize() const {
+ uint32_t start_offset = GetBootImageConstantTablesOffset();
+ const ImageSection& class_table = GetClassTableSection();
+ DCHECK_LE(start_offset, class_table.Offset());
+ size_t tables_size = class_table.Offset() + class_table.Size() - start_offset;
+ return RoundUp(tables_size, kPageSize);
+ }
+
// Visit ArtMethods in the section starting at base. Includes runtime methods.
// TODO: Delete base parameter if it is always equal to GetImageBegin.
void VisitPackedArtMethods(ArtMethodVisitor* visitor,
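
The two accessors added above describe one contiguous byte range: it begins at the page-aligned interned strings section and ends at the end of the class table, rounded up to a page so the range can be mmapped or mprotected as a unit. A self-contained sketch of that size computation, assuming 4 KiB pages and plain section structs rather than the ImageHeader API:

// Standalone sketch (not ART code) of the constant-tables range computation.
#include <cstdint>

constexpr uint32_t kPageSize = 4096;

struct Section { uint32_t offset; uint32_t size; };

constexpr uint32_t RoundUpToPage(uint32_t value) {
  return (value + kPageSize - 1) & ~(kPageSize - 1);
}

// Size from the start of the interned strings table to the page-rounded end
// of the class table.
constexpr uint32_t ConstantTablesSize(Section interned_strings, Section class_table) {
  return RoundUpToPage(class_table.offset + class_table.size - interned_strings.offset);
}

static_assert(ConstantTablesSize({4096, 100}, {8192, 200}) == 8192, "spans two pages");
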
diff --git a/runtime/intern_table.h b/runtime/intern_table.h
index 8714840..3e3abbd 100644
--- a/runtime/intern_table.h
+++ b/runtime/intern_table.h
@@ -223,6 +223,7 @@
// modifying the zygote intern table. The back of table is modified when strings are interned.
std::vector<UnorderedSet> tables_;
+ friend class OatWriter; // for boot image string table slot address lookup.
ART_FRIEND_TEST(InternTableTest, CrossHash);
};
@@ -282,6 +283,7 @@
// Weak root state, used for concurrent system weak processing and more.
gc::WeakRootState weak_root_state_ GUARDED_BY(Locks::intern_table_lock_);
+ friend class OatWriter; // for boot image string table slot address lookup.
friend class Transaction;
ART_FRIEND_TEST(InternTableTest, CrossHash);
DISALLOW_COPY_AND_ASSIGN(InternTable);
diff --git a/runtime/oat.h b/runtime/oat.h
index ab7c42e..a3e8eef 100644
--- a/runtime/oat.h
+++ b/runtime/oat.h
@@ -32,8 +32,8 @@
class PACKED(4) OatHeader {
public:
static constexpr uint8_t kOatMagic[] = { 'o', 'a', 't', '\n' };
- // Last oat version changed reason: Remove DexCache arrays from .bss.
- static constexpr uint8_t kOatVersion[] = { '1', '3', '3', '\0' };
+ // Last oat version changed reason: Map boot image InternTable and ClassTable into app .bss.
+ static constexpr uint8_t kOatVersion[] = { '1', '3', '4', '\0' };
static constexpr const char* kImageLocationKey = "image-location";
static constexpr const char* kDex2OatCmdLineKey = "dex2oat-cmdline";
diff --git a/runtime/oat_file.cc b/runtime/oat_file.cc
index 200681e..6515cfa 100644
--- a/runtime/oat_file.cc
+++ b/runtime/oat_file.cc
@@ -44,6 +44,7 @@
#include "elf_file.h"
#include "elf_utils.h"
#include "gc_root.h"
+#include "gc/space/image_space.h"
#include "mem_map.h"
#include "mirror/class.h"
#include "mirror/object-inl.h"
@@ -278,6 +279,37 @@
return true;
}
+static inline bool MapConstantTables(const gc::space::ImageSpace* space,
+ uint8_t* address) {
+ // If MREMAP_DUP is ever merged to Linux kernel, use it to avoid the unnecessary open()/close().
+ // Note: The current approach relies on the filename still referencing the same inode.
+
+ File file(space->GetImageFilename(), O_RDONLY, /* checkUsage */ false);
+ if (!file.IsOpened()) {
+ LOG(ERROR) << "Failed to open boot image file " << space->GetImageFilename();
+ return false;
+ }
+
+ uint32_t offset = space->GetImageHeader().GetBootImageConstantTablesOffset();
+ uint32_t size = space->GetImageHeader().GetBootImageConstantTablesSize();
+ std::string error_msg;
+ std::unique_ptr<MemMap> mem_map(MemMap::MapFileAtAddress(address,
+ size,
+ PROT_READ,
+ MAP_PRIVATE,
+ file.Fd(),
+ offset,
+ /* low_4gb */ false,
+ /* reuse */ true,
+ file.GetPath().c_str(),
+ &error_msg));
+ if (mem_map == nullptr) {
+ LOG(ERROR) << "Failed to mmap boot image tables from file " << space->GetImageFilename();
+ return false;
+ }
+ return true;
+}
+
bool OatFileBase::Setup(const char* abs_dex_location, std::string* error_msg) {
if (!GetOatHeader().IsValid()) {
std::string cause = GetOatHeader().GetValidationErrorMessage();
@@ -339,15 +371,12 @@
return false;
}
- if (bss_methods_ != nullptr && bss_methods_ != bss_begin_) {
- *error_msg = StringPrintf("In oat file '%s' found unexpected .bss gap before 'oatbssmethods': "
- "begin = %p, methods = %p",
- GetLocation().c_str(),
- bss_begin_,
- bss_methods_);
- return false;
- }
-
+ uint8_t* after_tables =
+ (bss_methods_ != nullptr) ? bss_methods_ : bss_roots_; // May be null.
+ uint8_t* boot_image_tables = (bss_begin_ == after_tables) ? nullptr : bss_begin_;
+ uint8_t* boot_image_tables_end =
+ (bss_begin_ == after_tables) ? nullptr : (after_tables != nullptr) ? after_tables : bss_end_;
+ DCHECK_EQ(boot_image_tables != nullptr, boot_image_tables_end != nullptr);
uint32_t dex_file_count = GetOatHeader().GetDexFileCount();
oat_dex_files_storage_.reserve(dex_file_count);
for (size_t i = 0; i < dex_file_count; i++) {
@@ -605,6 +634,31 @@
}
}
+ if (boot_image_tables != nullptr) {
+ // Map boot image tables into the .bss. The reserved size must match size of the tables.
+ size_t reserved_size = static_cast<size_t>(boot_image_tables_end - boot_image_tables);
+ size_t tables_size = 0u;
+ for (gc::space::ImageSpace* space : Runtime::Current()->GetHeap()->GetBootImageSpaces()) {
+ tables_size += space->GetImageHeader().GetBootImageConstantTablesSize();
+ DCHECK_ALIGNED(tables_size, kPageSize);
+ }
+ if (tables_size != reserved_size) {
+ *error_msg = StringPrintf("In oat file '%s' found unexpected boot image table sizes, "
+ " %zu bytes, should be %zu.",
+ GetLocation().c_str(),
+ reserved_size,
+ tables_size);
+ return false;
+ }
+ for (gc::space::ImageSpace* space : Runtime::Current()->GetHeap()->GetBootImageSpaces()) {
+ uint32_t current_tables_size = space->GetImageHeader().GetBootImageConstantTablesSize();
+ if (current_tables_size != 0u && !MapConstantTables(space, boot_image_tables)) {
+ return false;
+ }
+ boot_image_tables += current_tables_size;
+ }
+ DCHECK(boot_image_tables == boot_image_tables_end);
+ }
return true;
}
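
MapConstantTables() above maps each boot image's read-only tables at a pre-reserved spot in the oat file's .bss, and Setup() first checks that the reserved range matches the sum of the per-image table sizes before mapping them back to back. A plain POSIX illustration of the mapping step; ART goes through MemMap::MapFileAtAddress() with /* reuse */ true instead, and the function and parameter names below are hypothetical:

// Illustrative sketch (raw mmap, not ART's MemMap API): map a page-aligned
// file range read-only over an address range already reserved by the .bss.
#include <sys/mman.h>
#include <fcntl.h>
#include <unistd.h>
#include <cstdint>
#include <cstdio>

bool MapTablesReadOnly(const char* image_path, uint8_t* reserved_address,
                       uint32_t file_offset, uint32_t size) {
  int fd = open(image_path, O_RDONLY);
  if (fd < 0) {
    perror("open");
    return false;
  }
  // MAP_FIXED is safe here only because the target range was reserved earlier.
  void* mapped = mmap(reserved_address, size, PROT_READ,
                      MAP_PRIVATE | MAP_FIXED, fd, file_offset);
  close(fd);  // The mapping stays valid after closing the descriptor.
  if (mapped == MAP_FAILED) {
    perror("mmap");
    return false;
  }
  return true;
}
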
diff --git a/test/552-checker-sharpening/src/Main.java b/test/552-checker-sharpening/src/Main.java
index 7408e6d..7a60bb5 100644
--- a/test/552-checker-sharpening/src/Main.java
+++ b/test/552-checker-sharpening/src/Main.java
@@ -214,27 +214,27 @@
/// CHECK-START-X86: java.lang.String Main.$noinline$getBootImageString() sharpening (after)
// Note: load kind depends on PIC/non-PIC
- /// CHECK: LoadString load_kind:{{BootImageAddress|BssEntry}}
+ /// CHECK: LoadString load_kind:{{BootImageAddress|BootImageInternTable}}
/// CHECK-START-X86_64: java.lang.String Main.$noinline$getBootImageString() sharpening (after)
// Note: load kind depends on PIC/non-PIC
- /// CHECK: LoadString load_kind:{{BootImageAddress|BssEntry}}
+ /// CHECK: LoadString load_kind:{{BootImageAddress|BootImageInternTable}}
/// CHECK-START-ARM: java.lang.String Main.$noinline$getBootImageString() sharpening (after)
// Note: load kind depends on PIC/non-PIC
- /// CHECK: LoadString load_kind:{{BootImageAddress|BssEntry}}
+ /// CHECK: LoadString load_kind:{{BootImageAddress|BootImageInternTable}}
/// CHECK-START-ARM64: java.lang.String Main.$noinline$getBootImageString() sharpening (after)
// Note: load kind depends on PIC/non-PIC
- /// CHECK: LoadString load_kind:{{BootImageAddress|BssEntry}}
+ /// CHECK: LoadString load_kind:{{BootImageAddress|BootImageInternTable}}
/// CHECK-START-MIPS: java.lang.String Main.$noinline$getBootImageString() sharpening (after)
// Note: load kind depends on PIC/non-PIC
- /// CHECK: LoadString load_kind:{{BootImageAddress|BssEntry}}
+ /// CHECK: LoadString load_kind:{{BootImageAddress|BootImageInternTable}}
/// CHECK-START-MIPS64: java.lang.String Main.$noinline$getBootImageString() sharpening (after)
// Note: load kind depends on PIC/non-PIC
- /// CHECK: LoadString load_kind:{{BootImageAddress|BssEntry}}
+ /// CHECK: LoadString load_kind:{{BootImageAddress|BootImageInternTable}}
public static String $noinline$getBootImageString() {
// Prevent inlining to avoid the string comparison being optimized away.