Add tests for Thumb2RelativePatcher.
Also make the thumb2/arm64 thunk allocation precise instead
of eagerly allocating thunk space. This allows the calls to
use the maximum positive offset.
Change-Id: Ifa95b0bb00bd73eeab0c2905d21e2f3078f4b0a8
diff --git a/compiler/linker/arm/relative_patcher_arm_base.cc b/compiler/linker/arm/relative_patcher_arm_base.cc
index ecbbd09..2eae2a8 100644
--- a/compiler/linker/arm/relative_patcher_arm_base.cc
+++ b/compiler/linker/arm/relative_patcher_arm_base.cc
@@ -24,8 +24,9 @@
namespace linker {
uint32_t ArmBaseRelativePatcher::ReserveSpace(uint32_t offset,
- const CompiledMethod* compiled_method) {
- return ReserveSpaceInternal(offset, compiled_method, 0u);
+ const CompiledMethod* compiled_method,
+ MethodReference method_ref) {
+ return ReserveSpaceInternal(offset, compiled_method, method_ref, 0u);
}
uint32_t ArmBaseRelativePatcher::WriteThunks(OutputStream* out, uint32_t offset) {
@@ -66,6 +67,7 @@
uint32_t ArmBaseRelativePatcher::ReserveSpaceInternal(uint32_t offset,
const CompiledMethod* compiled_method,
+ MethodReference method_ref,
uint32_t max_extra_space) {
// NOTE: The final thunk can be reserved from InitCodeMethodVisitor::EndClass() while it
// may be written early by WriteCodeMethodVisitor::VisitMethod() for a deduplicated chunk
@@ -73,7 +75,8 @@
  // offset after reserving or writing any chunk.
if (UNLIKELY(compiled_method == nullptr)) {
uint32_t aligned_offset = CompiledMethod::AlignCode(offset, instruction_set_);
- bool needs_thunk = ReserveSpaceProcessPatches(aligned_offset);
+ DCHECK(method_ref.dex_file == nullptr && method_ref.dex_method_index == 0u);
+ bool needs_thunk = ReserveSpaceProcessPatches(aligned_offset, method_ref, aligned_offset);
if (needs_thunk) {
thunk_locations_.push_back(aligned_offset);
offset = CompiledMethod::AlignCode(aligned_offset + thunk_code_.size(), instruction_set_);
@@ -86,9 +89,12 @@
uint32_t next_aligned_offset = compiled_method->AlignCode(quick_code_offset + quick_code_size);
// Adjust for extra space required by the subclass.
next_aligned_offset = compiled_method->AlignCode(next_aligned_offset + max_extra_space);
+ // TODO: ignore unprocessed patches targeting this method if they can reach quick_code_offset.
+ // We need the MethodReference for that.
if (!unprocessed_patches_.empty() &&
next_aligned_offset - unprocessed_patches_.front().second > max_positive_displacement_) {
- bool needs_thunk = ReserveSpaceProcessPatches(next_aligned_offset);
+ bool needs_thunk = ReserveSpaceProcessPatches(quick_code_offset, method_ref,
+ next_aligned_offset);
if (needs_thunk) {
// A single thunk will cover all pending patches.
unprocessed_patches_.clear();
@@ -129,30 +135,42 @@
return displacement;
}
-bool ArmBaseRelativePatcher::ReserveSpaceProcessPatches(uint32_t next_aligned_offset) {
+bool ArmBaseRelativePatcher::ReserveSpaceProcessPatches(uint32_t quick_code_offset,
+ MethodReference method_ref,
+ uint32_t next_aligned_offset) {
// Process as many patches as possible, stop only on unresolved targets or calls too far back.
while (!unprocessed_patches_.empty()) {
+ MethodReference patch_ref = unprocessed_patches_.front().first;
uint32_t patch_offset = unprocessed_patches_.front().second;
- auto result = provider_->FindMethodOffset(unprocessed_patches_.front().first);
- if (!result.first) {
- // If still unresolved, check if we have a thunk within range.
- DCHECK(thunk_locations_.empty() || thunk_locations_.back() <= patch_offset);
- if (thunk_locations_.empty() ||
- patch_offset - thunk_locations_.back() > max_negative_displacement_) {
- return next_aligned_offset - patch_offset > max_positive_displacement_;
- }
- } else if (result.second >= patch_offset) {
- DCHECK_LE(result.second - patch_offset, max_positive_displacement_);
- } else {
- // When calling back, check if we have a thunk that's closer than the actual target.
- uint32_t target_offset =
- (thunk_locations_.empty() || result.second > thunk_locations_.back())
- ? result.second
- : thunk_locations_.back();
- DCHECK_GT(patch_offset, target_offset);
- if (patch_offset - target_offset > max_negative_displacement_) {
+ DCHECK(thunk_locations_.empty() || thunk_locations_.back() <= patch_offset);
+ if (patch_ref.dex_file == method_ref.dex_file &&
+ patch_ref.dex_method_index == method_ref.dex_method_index) {
+ DCHECK_GT(quick_code_offset, patch_offset);
+ if (quick_code_offset - patch_offset > max_positive_displacement_) {
return true;
}
+ } else {
+ auto result = provider_->FindMethodOffset(patch_ref);
+ if (!result.first) {
+ // If still unresolved, check if we have a thunk within range.
+ if (thunk_locations_.empty() ||
+ patch_offset - thunk_locations_.back() > max_negative_displacement_) {
+ return next_aligned_offset - patch_offset > max_positive_displacement_;
+ }
+ } else {
+ uint32_t target_offset = result.second - CompiledCode::CodeDelta(instruction_set_);
+ if (target_offset >= patch_offset) {
+ DCHECK_LE(target_offset - patch_offset, max_positive_displacement_);
+ } else {
+ // When calling back, check if we have a thunk that's closer than the actual target.
+ if (!thunk_locations_.empty()) {
+ target_offset = std::max(target_offset, thunk_locations_.back());
+ }
+ if (patch_offset - target_offset > max_negative_displacement_) {
+ return true;
+ }
+ }
+ }
}
unprocessed_patches_.pop_front();
}