/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "linker/arm/relative_patcher_arm_base.h"

#include <utility>

#include "compiled_method.h"
#include "oat.h"
#include "output_stream.h"
22
23namespace art {
24namespace linker {
25
26uint32_t ArmBaseRelativePatcher::ReserveSpace(uint32_t offset,
Vladimir Marko4d23c9d2015-04-01 23:03:09 +010027 const CompiledMethod* compiled_method,
28 MethodReference method_ref) {
29 return ReserveSpaceInternal(offset, compiled_method, method_ref, 0u);
Vladimir Markob163bb72015-03-31 21:49:49 +010030}
31
32uint32_t ArmBaseRelativePatcher::WriteThunks(OutputStream* out, uint32_t offset) {
33 if (current_thunk_to_write_ == thunk_locations_.size()) {
34 return offset;
35 }
36 uint32_t aligned_offset = CompiledMethod::AlignCode(offset, instruction_set_);
37 if (UNLIKELY(aligned_offset == thunk_locations_[current_thunk_to_write_])) {
38 ++current_thunk_to_write_;
39 uint32_t aligned_code_delta = aligned_offset - offset;
40 if (aligned_code_delta != 0u && !WriteCodeAlignment(out, aligned_code_delta)) {
41 return 0u;
42 }
43 if (UNLIKELY(!WriteRelCallThunk(out, ArrayRef<const uint8_t>(thunk_code_)))) {
44 return 0u;
45 }
46 uint32_t thunk_end_offset = aligned_offset + thunk_code_.size();
47 // Align after writing chunk, see the ReserveSpace() above.
48 offset = CompiledMethod::AlignCode(thunk_end_offset, instruction_set_);
49 aligned_code_delta = offset - thunk_end_offset;
50 if (aligned_code_delta != 0u && !WriteCodeAlignment(out, aligned_code_delta)) {
51 return 0u;
52 }
53 }
54 return offset;
55}
56
57ArmBaseRelativePatcher::ArmBaseRelativePatcher(RelativePatcherTargetProvider* provider,
58 InstructionSet instruction_set,
59 std::vector<uint8_t> thunk_code,
60 uint32_t max_positive_displacement,
61 uint32_t max_negative_displacement)
62 : provider_(provider), instruction_set_(instruction_set), thunk_code_(thunk_code),
63 max_positive_displacement_(max_positive_displacement),
64 max_negative_displacement_(max_negative_displacement),
65 thunk_locations_(), current_thunk_to_write_(0u), unprocessed_patches_() {
66}
67
// Reserves space for a call thunk, if needed, before the code of |compiled_method|
// (or at the end of the .oat section when |compiled_method| is nullptr). Returns
// the possibly-advanced |offset|. |max_extra_space| lets subclasses account for
// additional per-method data placed before the code.
uint32_t ArmBaseRelativePatcher::ReserveSpaceInternal(uint32_t offset,
                                                      const CompiledMethod* compiled_method,
                                                      MethodReference method_ref,
                                                      uint32_t max_extra_space) {
  // NOTE: The final thunk can be reserved from InitCodeMethodVisitor::EndClass() while it
  // may be written early by WriteCodeMethodVisitor::VisitMethod() for a deduplicated chunk
  // of code. To avoid any alignment discrepancies for the final chunk, we always align the
  // offset after reserving or writing any thunk.
  if (UNLIKELY(compiled_method == nullptr)) {
    // End-of-section call: decide whether a trailing thunk is needed for pending patches.
    uint32_t aligned_offset = CompiledMethod::AlignCode(offset, instruction_set_);
    // A null method must come with an empty (null/0) method reference.
    DCHECK(method_ref.dex_file == nullptr && method_ref.dex_method_index == 0u);
    bool needs_thunk = ReserveSpaceProcessPatches(aligned_offset, method_ref, aligned_offset);
    if (needs_thunk) {
      thunk_locations_.push_back(aligned_offset);
      offset = CompiledMethod::AlignCode(aligned_offset + thunk_code_.size(), instruction_set_);
    }
    return offset;
  }
  DCHECK(compiled_method->GetQuickCode() != nullptr);
  uint32_t quick_code_size = compiled_method->GetQuickCode()->size();
  // Code starts after the OatQuickMethodHeader at the aligned offset.
  uint32_t quick_code_offset = compiled_method->AlignCode(offset) + sizeof(OatQuickMethodHeader);
  uint32_t next_aligned_offset = compiled_method->AlignCode(quick_code_offset + quick_code_size);
  // Adjust for extra space required by the subclass.
  next_aligned_offset = compiled_method->AlignCode(next_aligned_offset + max_extra_space);
  // TODO: ignore unprocessed patches targeting this method if they can reach quick_code_offset.
  // We need the MethodReference for that.
  // Only scan patches when the oldest pending patch could fall out of forward range
  // by the end of this method's code.
  if (!unprocessed_patches_.empty() &&
      next_aligned_offset - unprocessed_patches_.front().second > max_positive_displacement_) {
    bool needs_thunk = ReserveSpaceProcessPatches(quick_code_offset, method_ref,
                                                  next_aligned_offset);
    if (needs_thunk) {
      // A single thunk will cover all pending patches.
      unprocessed_patches_.clear();
      uint32_t thunk_location = compiled_method->AlignCode(offset);
      thunk_locations_.push_back(thunk_location);
      offset = CompiledMethod::AlignCode(thunk_location + thunk_code_.size(), instruction_set_);
    }
  }
  // Queue this method's relative-call patches; they are resolved later, once the
  // target offsets (or a covering thunk) are known.
  for (const LinkerPatch& patch : compiled_method->GetPatches()) {
    if (patch.Type() == kLinkerPatchCallRelative) {
      unprocessed_patches_.emplace_back(patch.TargetMethod(),
                                        quick_code_offset + patch.LiteralOffset());
    }
  }
  return offset;
}
114
115uint32_t ArmBaseRelativePatcher::CalculateDisplacement(uint32_t patch_offset,
116 uint32_t target_offset) {
117 // Unsigned arithmetic with its well-defined overflow behavior is just fine here.
118 uint32_t displacement = target_offset - patch_offset;
119 // NOTE: With unsigned arithmetic we do mean to use && rather than || below.
120 if (displacement > max_positive_displacement_ && displacement < -max_negative_displacement_) {
121 // Unwritten thunks have higher offsets, check if it's within range.
122 DCHECK(current_thunk_to_write_ == thunk_locations_.size() ||
123 thunk_locations_[current_thunk_to_write_] > patch_offset);
124 if (current_thunk_to_write_ != thunk_locations_.size() &&
125 thunk_locations_[current_thunk_to_write_] - patch_offset < max_positive_displacement_) {
126 displacement = thunk_locations_[current_thunk_to_write_] - patch_offset;
127 } else {
128 // We must have a previous thunk then.
129 DCHECK_NE(current_thunk_to_write_, 0u);
130 DCHECK_LT(thunk_locations_[current_thunk_to_write_ - 1], patch_offset);
131 displacement = thunk_locations_[current_thunk_to_write_ - 1] - patch_offset;
132 DCHECK(displacement >= -max_negative_displacement_);
133 }
134 }
135 return displacement;
136}
137
// Walks the pending relative-call patches and returns true if a thunk must be
// reserved now (i.e. some patch could not otherwise reach its target). Patches
// proven reachable are dropped from the queue; processing stops at the first
// unresolved target or too-distant backward call.
bool ArmBaseRelativePatcher::ReserveSpaceProcessPatches(uint32_t quick_code_offset,
                                                        MethodReference method_ref,
                                                        uint32_t next_aligned_offset) {
  // Process as many patches as possible, stop only on unresolved targets or calls too far back.
  while (!unprocessed_patches_.empty()) {
    MethodReference patch_ref = unprocessed_patches_.front().first;
    uint32_t patch_offset = unprocessed_patches_.front().second;
    // Reserved thunks always precede the patches still in the queue.
    DCHECK(thunk_locations_.empty() || thunk_locations_.back() <= patch_offset);
    if (patch_ref.dex_file == method_ref.dex_file &&
        patch_ref.dex_method_index == method_ref.dex_method_index) {
      // Patch targets the method currently being placed; its code starts at
      // quick_code_offset, which is necessarily after the patch location.
      DCHECK_GT(quick_code_offset, patch_offset);
      if (quick_code_offset - patch_offset > max_positive_displacement_) {
        return true;
      }
    } else {
      auto result = provider_->FindMethodOffset(patch_ref);
      if (!result.first) {
        // If still unresolved, check if we have a thunk within range.
        if (thunk_locations_.empty() ||
            patch_offset - thunk_locations_.back() > max_negative_displacement_) {
          // No reachable thunk behind us; need one iff the worst case — target
          // placed after next_aligned_offset — would be out of forward range.
          return next_aligned_offset - patch_offset > max_positive_displacement_;
        }
      } else {
        // Resolved target; adjust for the instruction set's code delta (e.g. Thumb bit).
        uint32_t target_offset = result.second - CompiledCode::CodeDelta(instruction_set_);
        if (target_offset >= patch_offset) {
          // Forward call; must already be in range by construction.
          DCHECK_LE(target_offset - patch_offset, max_positive_displacement_);
        } else {
          // When calling back, check if we have a thunk that's closer than the actual target.
          if (!thunk_locations_.empty()) {
            target_offset = std::max(target_offset, thunk_locations_.back());
          }
          if (patch_offset - target_offset > max_negative_displacement_) {
            return true;
          }
        }
      }
    }
    // This patch is provably reachable — retire it.
    unprocessed_patches_.pop_front();
  }
  return false;
}
179
180} // namespace linker
181} // namespace art