/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "linker/arm64/relative_patcher_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "base/casts.h"
#include "driver/compiler_options.h"
#include "linker/relative_patcher_test.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/object.h"
#include "oat_quick_method_header.h"
#include "optimizing/code_generator_arm64.h"
#include "optimizing/optimizing_unit_test.h"

namespace art {
namespace linker {

class Arm64RelativePatcherTest : public RelativePatcherTest {
 public:
  explicit Arm64RelativePatcherTest(const std::string& variant)
      : RelativePatcherTest(InstructionSet::kArm64, variant) { }

 protected:
  static const uint8_t kCallRawCode[];
  static const ArrayRef<const uint8_t> kCallCode;
  static const uint8_t kNopRawCode[];
  static const ArrayRef<const uint8_t> kNopCode;

  // NOP instruction.
  static constexpr uint32_t kNopInsn = 0xd503201f;

  // All branches can be created from kBlPlus0 or kBPlus0 by adding the low 26 bits.
  static constexpr uint32_t kBlPlus0 = 0x94000000u;
  static constexpr uint32_t kBPlus0 = 0x14000000u;

  // Special BL values.
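  // BL encodes a signed 26-bit displacement in 4-byte units, giving a branch
  // range of +-128MiB: kBlPlusMax is BL +0x07fffffc (the largest forward
  // displacement) and kBlMinusMax is BL -0x08000000 (the largest backward one).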
  static constexpr uint32_t kBlPlusMax = 0x95ffffffu;
  static constexpr uint32_t kBlMinusMax = 0x96000000u;

  // LDR immediate, 32-bit, unsigned offset.
  static constexpr uint32_t kLdrWInsn = 0xb9400000u;

  // LDR register, 32-bit, LSL #2.
  static constexpr uint32_t kLdrWLsl2Insn = 0xb8607800u;

  // LDUR, 32-bit.
  static constexpr uint32_t kLdurWInsn = 0xb8400000u;

  // ADD/ADDS/SUB/SUBS immediate, 64-bit.
  static constexpr uint32_t kAddXInsn = 0x91000000u;
  static constexpr uint32_t kAddsXInsn = 0xb1000000u;
  static constexpr uint32_t kSubXInsn = 0xd1000000u;
  static constexpr uint32_t kSubsXInsn = 0xf1000000u;

  // LDUR x2, [sp, #4], i.e. unaligned load crossing 64-bit boundary (assuming aligned sp).
  static constexpr uint32_t kLdurInsn = 0xf840405fu;

  // LDR w12, <label> and LDR x12, <label>. Bits 5-23 contain label displacement in 4-byte units.
  static constexpr uint32_t kLdrWPcRelInsn = 0x1800000cu;
  static constexpr uint32_t kLdrXPcRelInsn = 0x5800000cu;

  // LDR w13, [SP, #<pimm>] and LDR x13, [SP, #<pimm>]. Bits 10-21 contain displacement from SP
  // in units of 4-bytes (for 32-bit load) or 8-bytes (for 64-bit load).
  static constexpr uint32_t kLdrWSpRelInsn = 0xb94003edu;
  static constexpr uint32_t kLdrXSpRelInsn = 0xf94003edu;

  // CBNZ x17, +0. Bits 5-23 are a placeholder for target offset from PC in units of 4-bytes.
  static constexpr uint32_t kCbnzIP1Plus0Insn = 0xb5000011u;

  void InsertInsn(std::vector<uint8_t>* code, size_t pos, uint32_t insn) {
    CHECK_LE(pos, code->size());
    const uint8_t insn_code[] = {
        static_cast<uint8_t>(insn),
        static_cast<uint8_t>(insn >> 8),
        static_cast<uint8_t>(insn >> 16),
        static_cast<uint8_t>(insn >> 24),
    };
    static_assert(sizeof(insn_code) == 4u, "Invalid sizeof(insn_code).");
    code->insert(code->begin() + pos, insn_code, insn_code + sizeof(insn_code));
  }

  void PushBackInsn(std::vector<uint8_t>* code, uint32_t insn) {
    InsertInsn(code, code->size(), insn);
  }

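  // Assemble a list of 32-bit instructions into raw little-endian code bytes;
  // for example, RawCode({kNopInsn}) yields the four bytes 0x1f, 0x20, 0x03, 0xd5.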
  std::vector<uint8_t> RawCode(std::initializer_list<uint32_t> insns) {
    std::vector<uint8_t> raw_code;
    raw_code.reserve(insns.size() * 4u);
    for (uint32_t insn : insns) {
      PushBackInsn(&raw_code, insn);
    }
    return raw_code;
  }

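  // Add method1 and a last method separated by filler methods so that, if no
  // thunk needs to be inserted, the last method's code starts exactly
  // |distance_without_thunks| bytes after method1's code. Returns the index of
  // the last method.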
  uint32_t Create2MethodsWithGap(const ArrayRef<const uint8_t>& method1_code,
                                 const ArrayRef<const LinkerPatch>& method1_patches,
                                 const ArrayRef<const uint8_t>& last_method_code,
                                 const ArrayRef<const LinkerPatch>& last_method_patches,
                                 uint32_t distance_without_thunks) {
    CHECK_EQ(distance_without_thunks % kArm64Alignment, 0u);
    uint32_t method1_offset =
        kTrampolineSize + CodeAlignmentSize(kTrampolineSize) + sizeof(OatQuickMethodHeader);
    AddCompiledMethod(MethodRef(1u), method1_code, method1_patches);
    const uint32_t gap_start = method1_offset + method1_code.size();

    // We want to put the last method at a very precise offset.
    const uint32_t last_method_offset = method1_offset + distance_without_thunks;
    CHECK_ALIGNED(last_method_offset, kArm64Alignment);
    const uint32_t gap_end = last_method_offset - sizeof(OatQuickMethodHeader);

    // Fill the gap with intermediate methods in chunks of 2MiB and the first in [2MiB, 4MiB).
    // (This allows deduplicating the small chunks to avoid using 256MiB of memory for +-128MiB
    // offsets by this test. Making the first chunk bigger makes it easy to give all intermediate
    // methods the same alignment of the end, so the thunk insertion adds a predictable size as
    // long as it's after the first chunk.)
    uint32_t method_idx = 2u;
    constexpr uint32_t kSmallChunkSize = 2 * MB;
    std::vector<uint8_t> gap_code;
    uint32_t gap_size = gap_end - gap_start;
    uint32_t num_small_chunks = std::max(gap_size / kSmallChunkSize, 1u) - 1u;
    uint32_t chunk_start = gap_start;
    uint32_t chunk_size = gap_size - num_small_chunks * kSmallChunkSize;
    for (uint32_t i = 0; i <= num_small_chunks; ++i) {  // num_small_chunks+1 iterations.
      uint32_t chunk_code_size =
          chunk_size - CodeAlignmentSize(chunk_start) - sizeof(OatQuickMethodHeader);
      gap_code.resize(chunk_code_size, 0u);
      AddCompiledMethod(MethodRef(method_idx), ArrayRef<const uint8_t>(gap_code));
      method_idx += 1u;
      chunk_start += chunk_size;
      chunk_size = kSmallChunkSize;  // For all but the first chunk.
      DCHECK_EQ(CodeAlignmentSize(gap_end), CodeAlignmentSize(chunk_start));
    }

    // Add the last method and link.
    AddCompiledMethod(MethodRef(method_idx), last_method_code, last_method_patches);
    Link();

    // Check assumptions.
    CHECK_EQ(GetMethodOffset(1), method1_offset);
    auto last_result = method_offset_map_.FindMethodOffset(MethodRef(method_idx));
    CHECK(last_result.first);
    // There may be a thunk before the last method.
    if (last_result.second != last_method_offset) {
      // Thunk present. Check that there's only one.
      uint32_t thunk_end =
          CompiledCode::AlignCode(gap_end, InstructionSet::kArm64) + MethodCallThunkSize();
      uint32_t header_offset = thunk_end + CodeAlignmentSize(thunk_end);
      CHECK_EQ(last_result.second, header_offset + sizeof(OatQuickMethodHeader));
    }
    return method_idx;
  }

  uint32_t GetMethodOffset(uint32_t method_idx) {
    auto result = method_offset_map_.FindMethodOffset(MethodRef(method_idx));
    CHECK(result.first);
    CHECK_ALIGNED(result.second, 4u);
    return result.second;
  }

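  // Compile the thunk code for |patch| with the ARM64 code generator,
  // optionally returning the thunk's debug name.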
  std::vector<uint8_t> CompileThunk(const LinkerPatch& patch,
                                    /*out*/ std::string* debug_name = nullptr) {
    OptimizingUnitTestHelper helper;
    HGraph* graph = helper.CreateGraph();
    CompilerOptions compiler_options;

    // Set isa to arm64.
    compiler_options.instruction_set_ = instruction_set_;
    compiler_options.instruction_set_features_ =
        InstructionSetFeatures::FromBitmap(instruction_set_, instruction_set_features_->AsBitmap());
    CHECK(compiler_options.instruction_set_features_->Equals(instruction_set_features_.get()));

    arm64::CodeGeneratorARM64 codegen(graph, compiler_options);
    ArenaVector<uint8_t> code(helper.GetAllocator()->Adapter());
    codegen.EmitThunkCode(patch, &code, debug_name);
    return std::vector<uint8_t>(code.begin(), code.end());
  }

  void AddCompiledMethod(
      MethodReference method_ref,
      const ArrayRef<const uint8_t>& code,
      const ArrayRef<const LinkerPatch>& patches = ArrayRef<const LinkerPatch>()) {
    RelativePatcherTest::AddCompiledMethod(method_ref, code, patches);

    // Make sure the ThunkProvider has all the necessary thunks.
    for (const LinkerPatch& patch : patches) {
      if (patch.GetType() == LinkerPatch::Type::kCallEntrypoint ||
          patch.GetType() == LinkerPatch::Type::kBakerReadBarrierBranch ||
          patch.GetType() == LinkerPatch::Type::kCallRelative) {
        std::string debug_name;
        std::vector<uint8_t> thunk_code = CompileThunk(patch, &debug_name);
        thunk_provider_.SetThunkCode(patch, ArrayRef<const uint8_t>(thunk_code), debug_name);
      }
    }
  }

  std::vector<uint8_t> CompileMethodCallThunk() {
    LinkerPatch patch = LinkerPatch::RelativeCodePatch(/* literal_offset */ 0u,
                                                       /* target_dex_file */ nullptr,
                                                       /* target_method_idx */ 0u);
    return CompileThunk(patch);
  }

  uint32_t MethodCallThunkSize() {
    return CompileMethodCallThunk().size();
  }

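  // Check that the linked output contains the expected method call thunk at
  // |thunk_offset|, dumping a diff on mismatch.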
  bool CheckThunk(uint32_t thunk_offset) {
    const std::vector<uint8_t> expected_code = CompileMethodCallThunk();
    if (output_.size() < thunk_offset + expected_code.size()) {
      LOG(ERROR) << "output_.size() == " << output_.size() << " < "
          << "thunk_offset + expected_code.size() == " << (thunk_offset + expected_code.size());
      return false;
    }
    ArrayRef<const uint8_t> linked_code(&output_[thunk_offset], expected_code.size());
    if (linked_code == ArrayRef<const uint8_t>(expected_code)) {
      return true;
    }
    // Log failure info.
    DumpDiff(ArrayRef<const uint8_t>(expected_code), linked_code);
    return false;
  }

  std::vector<uint8_t> GenNops(size_t num_nops) {
    std::vector<uint8_t> result;
    result.reserve(num_nops * 4u);
    for (size_t i = 0; i != num_nops; ++i) {
      PushBackInsn(&result, kNopInsn);
    }
    return result;
  }

  std::vector<uint8_t> GenNopsAndBl(size_t num_nops, uint32_t bl) {
    std::vector<uint8_t> result;
    result.reserve(num_nops * 4u + 4u);
    for (size_t i = 0; i != num_nops; ++i) {
      PushBackInsn(&result, kNopInsn);
    }
    PushBackInsn(&result, bl);
    return result;
  }

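  // Generate |num_nops| NOPs followed by an ADRP that computes the 4KiB page
  // of |target_offset| and a use instruction (LDR or ADD) that applies the
  // low 12 bits, as emitted at |method_offset|.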
  std::vector<uint8_t> GenNopsAndAdrpAndUse(size_t num_nops,
                                            uint32_t method_offset,
                                            uint32_t target_offset,
                                            uint32_t use_insn) {
    std::vector<uint8_t> result;
    result.reserve(num_nops * 4u + 8u);
    for (size_t i = 0; i != num_nops; ++i) {
      PushBackInsn(&result, kNopInsn);
    }
    CHECK_ALIGNED(method_offset, 4u);
    CHECK_ALIGNED(target_offset, 4u);
    uint32_t adrp_offset = method_offset + num_nops * 4u;
    uint32_t disp = target_offset - (adrp_offset & ~0xfffu);
    if (use_insn == kLdrWInsn) {
      DCHECK_ALIGNED(disp, 1u << 2);
      use_insn |= 1 |                         // LDR x1, [x0, #(imm12 << 2)]
          ((disp & 0xfffu) << (10 - 2));      // imm12 = ((disp & 0xfffu) >> 2) is at bit 10.
    } else if (use_insn == kAddXInsn) {
      use_insn |= 1 |                         // ADD x1, x0, #imm
          (disp & 0xfffu) << 10;              // imm12 = (disp & 0xfffu) is at bit 10.
    } else {
      LOG(FATAL) << "Unexpected instruction: 0x" << std::hex << use_insn;
    }
    uint32_t adrp = 0x90000000u |             // ADRP x0, +SignExtend(immhi:immlo:Zeros(12), 64)
        ((disp & 0x3000u) << (29 - 12)) |     // immlo = ((disp & 0x3000u) >> 12) is at bit 29,
        ((disp & 0xffffc000) >> (14 - 5)) |   // immhi = (disp >> 14) is at bit 5,
        // We take the sign bit from the disp, limiting disp to +- 2GiB.
        ((disp & 0x80000000) >> (31 - 23));   // sign bit in immhi is at bit 23.
    PushBackInsn(&result, adrp);
    PushBackInsn(&result, use_insn);
    return result;
  }

  std::vector<uint8_t> GenNopsAndAdrpLdr(size_t num_nops,
                                         uint32_t method_offset,
                                         uint32_t target_offset) {
    return GenNopsAndAdrpAndUse(num_nops, method_offset, target_offset, kLdrWInsn);
  }

  void TestNopsAdrpLdr(size_t num_nops, uint32_t bss_begin, uint32_t string_entry_offset) {
    constexpr uint32_t kStringIndex = 1u;
    string_index_to_offset_map_.Put(kStringIndex, string_entry_offset);
    bss_begin_ = bss_begin;
    auto code = GenNopsAndAdrpLdr(num_nops, 0u, 0u);  // Unpatched.
    const LinkerPatch patches[] = {
        LinkerPatch::StringBssEntryPatch(num_nops * 4u     , nullptr, num_nops * 4u, kStringIndex),
        LinkerPatch::StringBssEntryPatch(num_nops * 4u + 4u, nullptr, num_nops * 4u, kStringIndex),
    };
    AddCompiledMethod(MethodRef(1u),
                      ArrayRef<const uint8_t>(code),
                      ArrayRef<const LinkerPatch>(patches));
    Link();

    uint32_t method1_offset = GetMethodOffset(1u);
    uint32_t target_offset = bss_begin_ + string_entry_offset;
    auto expected_code = GenNopsAndAdrpLdr(num_nops, method1_offset, target_offset);
    EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));
  }

Vladimir Markocac5a7e2016-02-22 10:39:50 +0000316 std::vector<uint8_t> GenNopsAndAdrpAdd(size_t num_nops,
317 uint32_t method_offset,
318 uint32_t target_offset) {
319 return GenNopsAndAdrpAndUse(num_nops, method_offset, target_offset, kAddXInsn);
320 }
321
322 void TestNopsAdrpAdd(size_t num_nops, uint32_t string_offset) {
323 constexpr uint32_t kStringIndex = 1u;
324 string_index_to_offset_map_.Put(kStringIndex, string_offset);
325 auto code = GenNopsAndAdrpAdd(num_nops, 0u, 0u); // Unpatched.
Vladimir Markof4f2daa2017-03-20 18:26:59 +0000326 const LinkerPatch patches[] = {
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000327 LinkerPatch::RelativeStringPatch(num_nops * 4u , nullptr, num_nops * 4u, kStringIndex),
328 LinkerPatch::RelativeStringPatch(num_nops * 4u + 4u, nullptr, num_nops * 4u, kStringIndex),
329 };
330 AddCompiledMethod(MethodRef(1u),
331 ArrayRef<const uint8_t>(code),
332 ArrayRef<const LinkerPatch>(patches));
333 Link();
334
335 uint32_t method1_offset = GetMethodOffset(1u);
336 auto expected_code = GenNopsAndAdrpAdd(num_nops, method1_offset, string_offset);
337 EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));
338 }
339
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000340 void PrepareNopsAdrpInsn2Ldr(size_t num_nops,
341 uint32_t insn2,
Vladimir Marko5f078202017-05-18 13:32:53 +0100342 uint32_t bss_begin,
343 uint32_t string_entry_offset) {
344 constexpr uint32_t kStringIndex = 1u;
345 string_index_to_offset_map_.Put(kStringIndex, string_entry_offset);
346 bss_begin_ = bss_begin;
Vladimir Marko3f311cf2015-04-02 15:28:45 +0100347 auto code = GenNopsAndAdrpLdr(num_nops, 0u, 0u); // Unpatched.
348 InsertInsn(&code, num_nops * 4u + 4u, insn2);
Vladimir Markof4f2daa2017-03-20 18:26:59 +0000349 const LinkerPatch patches[] = {
Vladimir Marko5f078202017-05-18 13:32:53 +0100350 LinkerPatch::StringBssEntryPatch(num_nops * 4u , nullptr, num_nops * 4u, kStringIndex),
351 LinkerPatch::StringBssEntryPatch(num_nops * 4u + 8u, nullptr, num_nops * 4u, kStringIndex),
Vladimir Marko3f311cf2015-04-02 15:28:45 +0100352 };
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000353 AddCompiledMethod(MethodRef(1u),
354 ArrayRef<const uint8_t>(code),
Vladimir Markob207e142015-04-02 21:25:21 +0100355 ArrayRef<const LinkerPatch>(patches));
Vladimir Marko3f311cf2015-04-02 15:28:45 +0100356 Link();
357 }
358
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000359 void PrepareNopsAdrpInsn2Add(size_t num_nops, uint32_t insn2, uint32_t string_offset) {
360 constexpr uint32_t kStringIndex = 1u;
361 string_index_to_offset_map_.Put(kStringIndex, string_offset);
362 auto code = GenNopsAndAdrpAdd(num_nops, 0u, 0u); // Unpatched.
363 InsertInsn(&code, num_nops * 4u + 4u, insn2);
Vladimir Markof4f2daa2017-03-20 18:26:59 +0000364 const LinkerPatch patches[] = {
Vladimir Markocac5a7e2016-02-22 10:39:50 +0000365 LinkerPatch::RelativeStringPatch(num_nops * 4u , nullptr, num_nops * 4u, kStringIndex),
366 LinkerPatch::RelativeStringPatch(num_nops * 4u + 8u, nullptr, num_nops * 4u, kStringIndex),
367 };
368 AddCompiledMethod(MethodRef(1u),
369 ArrayRef<const uint8_t>(code),
370 ArrayRef<const LinkerPatch>(patches));
371 Link();
372 }
Vladimir Marko3f311cf2015-04-02 15:28:45 +0100373
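  // In the helpers below, |insn2| is an extra instruction inserted between the
  // ADRP and the instruction that uses its result; the patcher examines this
  // middle instruction when deciding whether the ADRP needs a fixup thunk
  // (the Cortex-A53 erratum 843419 workaround).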
  void TestNopsAdrpInsn2AndUse(size_t num_nops,
                               uint32_t insn2,
                               uint32_t target_offset,
                               uint32_t use_insn) {
    uint32_t method1_offset = GetMethodOffset(1u);
    auto expected_code = GenNopsAndAdrpAndUse(num_nops, method1_offset, target_offset, use_insn);
    InsertInsn(&expected_code, num_nops * 4u + 4u, insn2);
    EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));
  }

  void TestNopsAdrpInsn2AndUseHasThunk(size_t num_nops,
                                       uint32_t insn2,
                                       uint32_t target_offset,
                                       uint32_t use_insn) {
    uint32_t method1_offset = GetMethodOffset(1u);
    CHECK(!compiled_method_refs_.empty());
    CHECK_EQ(compiled_method_refs_[0].index, 1u);
    CHECK_EQ(compiled_method_refs_.size(), compiled_methods_.size());
    uint32_t method1_size = compiled_methods_[0]->GetQuickCode().size();
    uint32_t thunk_offset =
        CompiledCode::AlignCode(method1_offset + method1_size, InstructionSet::kArm64);
    uint32_t b_diff = thunk_offset - (method1_offset + num_nops * 4u);
    CHECK_ALIGNED(b_diff, 4u);
    ASSERT_LT(b_diff, 128 * MB);
    uint32_t b_out = kBPlus0 + ((b_diff >> 2) & 0x03ffffffu);
    uint32_t b_in = kBPlus0 + ((-b_diff >> 2) & 0x03ffffffu);

    auto expected_code = GenNopsAndAdrpAndUse(num_nops, method1_offset, target_offset, use_insn);
    InsertInsn(&expected_code, num_nops * 4u + 4u, insn2);
    // Replace the ADRP with a B to the thunk.
    expected_code.erase(expected_code.begin() + num_nops * 4u,
                        expected_code.begin() + num_nops * 4u + 4u);
    InsertInsn(&expected_code, num_nops * 4u, b_out);
    EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));

    auto expected_thunk_code = GenNopsAndAdrpLdr(0u, thunk_offset, target_offset);
    ASSERT_EQ(expected_thunk_code.size(), 8u);
    expected_thunk_code.erase(expected_thunk_code.begin() + 4u, expected_thunk_code.begin() + 8u);
    InsertInsn(&expected_thunk_code, 4u, b_in);
    ASSERT_EQ(expected_thunk_code.size(), 8u);

    uint32_t thunk_size = MethodCallThunkSize();
    ASSERT_EQ(thunk_offset + thunk_size, output_.size());
    ASSERT_EQ(thunk_size, expected_thunk_code.size());
    ArrayRef<const uint8_t> thunk_code(&output_[thunk_offset], thunk_size);
    if (ArrayRef<const uint8_t>(expected_thunk_code) != thunk_code) {
      DumpDiff(ArrayRef<const uint8_t>(expected_thunk_code), thunk_code);
      FAIL();
    }
  }

  void TestAdrpInsn2Ldr(uint32_t insn2,
                        uint32_t adrp_offset,
                        bool has_thunk,
                        uint32_t bss_begin,
                        uint32_t string_entry_offset) {
    uint32_t method1_offset =
        kTrampolineSize + CodeAlignmentSize(kTrampolineSize) + sizeof(OatQuickMethodHeader);
    ASSERT_LT(method1_offset, adrp_offset);
    CHECK_ALIGNED(adrp_offset, 4u);
    uint32_t num_nops = (adrp_offset - method1_offset) / 4u;
    PrepareNopsAdrpInsn2Ldr(num_nops, insn2, bss_begin, string_entry_offset);
    uint32_t target_offset = bss_begin_ + string_entry_offset;
    if (has_thunk) {
      TestNopsAdrpInsn2AndUseHasThunk(num_nops, insn2, target_offset, kLdrWInsn);
    } else {
      TestNopsAdrpInsn2AndUse(num_nops, insn2, target_offset, kLdrWInsn);
    }
    ASSERT_EQ(method1_offset, GetMethodOffset(1u));  // If this fails, num_nops is wrong.
  }

  void TestAdrpLdurLdr(uint32_t adrp_offset,
                       bool has_thunk,
                       uint32_t bss_begin,
                       uint32_t string_entry_offset) {
    TestAdrpInsn2Ldr(kLdurInsn, adrp_offset, has_thunk, bss_begin, string_entry_offset);
  }

  void TestAdrpLdrPcRelLdr(uint32_t pcrel_ldr_insn,
                           int32_t pcrel_disp,
                           uint32_t adrp_offset,
                           bool has_thunk,
                           uint32_t bss_begin,
                           uint32_t string_entry_offset) {
    ASSERT_LT(pcrel_disp, 0x100000);
    ASSERT_GE(pcrel_disp, -0x100000);
    ASSERT_EQ(pcrel_disp & 0x3, 0);
    uint32_t insn2 = pcrel_ldr_insn | (((static_cast<uint32_t>(pcrel_disp) >> 2) & 0x7ffffu) << 5);
    TestAdrpInsn2Ldr(insn2, adrp_offset, has_thunk, bss_begin, string_entry_offset);
  }

  void TestAdrpLdrSpRelLdr(uint32_t sprel_ldr_insn,
                           uint32_t sprel_disp_in_load_units,
                           uint32_t adrp_offset,
                           bool has_thunk,
                           uint32_t bss_begin,
                           uint32_t string_entry_offset) {
    ASSERT_LT(sprel_disp_in_load_units, 0x1000u);
    uint32_t insn2 = sprel_ldr_insn | ((sprel_disp_in_load_units & 0xfffu) << 10);
    TestAdrpInsn2Ldr(insn2, adrp_offset, has_thunk, bss_begin, string_entry_offset);
  }

  void TestAdrpInsn2Add(uint32_t insn2,
                        uint32_t adrp_offset,
                        bool has_thunk,
                        uint32_t string_offset) {
    uint32_t method1_offset =
        kTrampolineSize + CodeAlignmentSize(kTrampolineSize) + sizeof(OatQuickMethodHeader);
    ASSERT_LT(method1_offset, adrp_offset);
    CHECK_ALIGNED(adrp_offset, 4u);
    uint32_t num_nops = (adrp_offset - method1_offset) / 4u;
    PrepareNopsAdrpInsn2Add(num_nops, insn2, string_offset);
    if (has_thunk) {
      TestNopsAdrpInsn2AndUseHasThunk(num_nops, insn2, string_offset, kAddXInsn);
    } else {
      TestNopsAdrpInsn2AndUse(num_nops, insn2, string_offset, kAddXInsn);
    }
    ASSERT_EQ(method1_offset, GetMethodOffset(1u));  // If this fails, num_nops is wrong.
  }

  void TestAdrpLdurAdd(uint32_t adrp_offset, bool has_thunk, uint32_t string_offset) {
    TestAdrpInsn2Add(kLdurInsn, adrp_offset, has_thunk, string_offset);
  }

  void TestAdrpLdrPcRelAdd(uint32_t pcrel_ldr_insn,
                           int32_t pcrel_disp,
                           uint32_t adrp_offset,
                           bool has_thunk,
                           uint32_t string_offset) {
    ASSERT_LT(pcrel_disp, 0x100000);
    ASSERT_GE(pcrel_disp, -0x100000);
    ASSERT_EQ(pcrel_disp & 0x3, 0);
    uint32_t insn2 = pcrel_ldr_insn | (((static_cast<uint32_t>(pcrel_disp) >> 2) & 0x7ffffu) << 5);
    TestAdrpInsn2Add(insn2, adrp_offset, has_thunk, string_offset);
  }

  void TestAdrpLdrSpRelAdd(uint32_t sprel_ldr_insn,
                           uint32_t sprel_disp_in_load_units,
                           uint32_t adrp_offset,
                           bool has_thunk,
                           uint32_t string_offset) {
    ASSERT_LT(sprel_disp_in_load_units, 0x1000u);
    uint32_t insn2 = sprel_ldr_insn | ((sprel_disp_in_load_units & 0xfffu) << 10);
    TestAdrpInsn2Add(insn2, adrp_offset, has_thunk, string_offset);
  }

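  // Forward the encoding of Baker read barrier patch data to the ARM64 code
  // generator, which defines the custom data layout for these patches.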
  static uint32_t EncodeBakerReadBarrierFieldData(uint32_t base_reg, uint32_t holder_reg) {
    return arm64::CodeGeneratorARM64::EncodeBakerReadBarrierFieldData(base_reg, holder_reg);
  }

  static uint32_t EncodeBakerReadBarrierArrayData(uint32_t base_reg) {
    return arm64::CodeGeneratorARM64::EncodeBakerReadBarrierArrayData(base_reg);
  }

  static uint32_t EncodeBakerReadBarrierGcRootData(uint32_t root_reg) {
    return arm64::CodeGeneratorARM64::EncodeBakerReadBarrierGcRootData(root_reg);
  }

  std::vector<uint8_t> CompileBakerOffsetThunk(uint32_t base_reg, uint32_t holder_reg) {
    const LinkerPatch patch = LinkerPatch::BakerReadBarrierBranchPatch(
        /* literal_offset */ 0u, EncodeBakerReadBarrierFieldData(base_reg, holder_reg));
    return CompileThunk(patch);
  }

  std::vector<uint8_t> CompileBakerArrayThunk(uint32_t base_reg) {
    LinkerPatch patch = LinkerPatch::BakerReadBarrierBranchPatch(
        /* literal_offset */ 0u, EncodeBakerReadBarrierArrayData(base_reg));
    return CompileThunk(patch);
  }

  std::vector<uint8_t> CompileBakerGcRootThunk(uint32_t root_reg) {
    LinkerPatch patch = LinkerPatch::BakerReadBarrierBranchPatch(
        /* literal_offset */ 0u, EncodeBakerReadBarrierGcRootData(root_reg));
    return CompileThunk(patch);
  }

  uint32_t GetOutputInsn(uint32_t offset) {
    CHECK_LE(offset, output_.size());
    CHECK_GE(output_.size() - offset, 4u);
    return (static_cast<uint32_t>(output_[offset]) << 0) |
           (static_cast<uint32_t>(output_[offset + 1]) << 8) |
           (static_cast<uint32_t>(output_[offset + 2]) << 16) |
           (static_cast<uint32_t>(output_[offset + 3]) << 24);
  }

  void TestBakerField(uint32_t offset, uint32_t ref_reg);
};

const uint8_t Arm64RelativePatcherTest::kCallRawCode[] = {
    0x00, 0x00, 0x00, 0x94
};

const ArrayRef<const uint8_t> Arm64RelativePatcherTest::kCallCode(kCallRawCode);

const uint8_t Arm64RelativePatcherTest::kNopRawCode[] = {
    0x1f, 0x20, 0x03, 0xd5
};

const ArrayRef<const uint8_t> Arm64RelativePatcherTest::kNopCode(kNopRawCode);

class Arm64RelativePatcherTestDefault : public Arm64RelativePatcherTest {
 public:
  Arm64RelativePatcherTestDefault() : Arm64RelativePatcherTest("default") { }
};

TEST_F(Arm64RelativePatcherTestDefault, CallSelf) {
  const LinkerPatch patches[] = {
      LinkerPatch::RelativeCodePatch(0u, nullptr, 1u),
  };
  AddCompiledMethod(MethodRef(1u), kCallCode, ArrayRef<const LinkerPatch>(patches));
  Link();

  const std::vector<uint8_t> expected_code = RawCode({kBlPlus0});
  EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));
}

TEST_F(Arm64RelativePatcherTestDefault, CallOther) {
  const LinkerPatch method1_patches[] = {
      LinkerPatch::RelativeCodePatch(0u, nullptr, 2u),
  };
  AddCompiledMethod(MethodRef(1u), kCallCode, ArrayRef<const LinkerPatch>(method1_patches));
  const LinkerPatch method2_patches[] = {
      LinkerPatch::RelativeCodePatch(0u, nullptr, 1u),
  };
  AddCompiledMethod(MethodRef(2u), kCallCode, ArrayRef<const LinkerPatch>(method2_patches));
  Link();

  uint32_t method1_offset = GetMethodOffset(1u);
  uint32_t method2_offset = GetMethodOffset(2u);
  uint32_t diff_after = method2_offset - method1_offset;
  CHECK_ALIGNED(diff_after, 4u);
  ASSERT_LT(diff_after >> 2, 1u << 8);  // Simple encoding, (diff_after >> 2) fits into 8 bits.
  const std::vector<uint8_t> method1_expected_code = RawCode({kBlPlus0 + (diff_after >> 2)});
  EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(method1_expected_code)));
  uint32_t diff_before = method1_offset - method2_offset;
  CHECK_ALIGNED(diff_before, 4u);
  ASSERT_GE(diff_before, -1u << 27);
  auto method2_expected_code = GenNopsAndBl(0u, kBlPlus0 | ((diff_before >> 2) & 0x03ffffffu));
  EXPECT_TRUE(CheckLinkedMethod(MethodRef(2u), ArrayRef<const uint8_t>(method2_expected_code)));
}

TEST_F(Arm64RelativePatcherTestDefault, CallTrampoline) {
  const LinkerPatch patches[] = {
      LinkerPatch::RelativeCodePatch(0u, nullptr, 2u),
  };
  AddCompiledMethod(MethodRef(1u), kCallCode, ArrayRef<const LinkerPatch>(patches));
  Link();

  uint32_t method1_offset = GetMethodOffset(1u);
  uint32_t diff = kTrampolineOffset - method1_offset;
  ASSERT_EQ(diff & 1u, 0u);
  ASSERT_GE(diff, -1u << 9);  // Simple encoding, -256 <= (diff >> 1) < 0 (checked as unsigned).
  auto expected_code = GenNopsAndBl(0u, kBlPlus0 | ((diff >> 2) & 0x03ffffffu));
  EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));
}

TEST_F(Arm64RelativePatcherTestDefault, CallTrampolineTooFar) {
  constexpr uint32_t missing_method_index = 1024u;
  auto last_method_raw_code = GenNopsAndBl(1u, kBlPlus0);
  constexpr uint32_t bl_offset_in_last_method = 1u * 4u;  // After NOPs.
  ArrayRef<const uint8_t> last_method_code(last_method_raw_code);
  ASSERT_EQ(bl_offset_in_last_method + 4u, last_method_code.size());
  const LinkerPatch last_method_patches[] = {
      LinkerPatch::RelativeCodePatch(bl_offset_in_last_method, nullptr, missing_method_index),
  };

  constexpr uint32_t just_over_max_negative_disp = 128 * MB + 4;
  uint32_t last_method_idx = Create2MethodsWithGap(
      kNopCode,
      ArrayRef<const LinkerPatch>(),
      last_method_code,
      ArrayRef<const LinkerPatch>(last_method_patches),
      just_over_max_negative_disp - bl_offset_in_last_method);
  uint32_t method1_offset = GetMethodOffset(1u);
  uint32_t last_method_offset = GetMethodOffset(last_method_idx);
  ASSERT_EQ(method1_offset,
            last_method_offset + bl_offset_in_last_method - just_over_max_negative_disp);
  ASSERT_FALSE(method_offset_map_.FindMethodOffset(MethodRef(missing_method_index)).first);

  // Check linked code.
  uint32_t thunk_offset =
      CompiledCode::AlignCode(last_method_offset + last_method_code.size(), InstructionSet::kArm64);
  uint32_t diff = thunk_offset - (last_method_offset + bl_offset_in_last_method);
  ASSERT_TRUE(IsAligned<4u>(diff));
  ASSERT_LT(diff, 128 * MB);
  auto expected_code = GenNopsAndBl(1u, kBlPlus0 | (diff >> 2));
  EXPECT_TRUE(CheckLinkedMethod(MethodRef(last_method_idx),
                                ArrayRef<const uint8_t>(expected_code)));
  EXPECT_TRUE(CheckThunk(thunk_offset));
}

TEST_F(Arm64RelativePatcherTestDefault, CallOtherAlmostTooFarAfter) {
  auto method1_raw_code = GenNopsAndBl(1u, kBlPlus0);
  constexpr uint32_t bl_offset_in_method1 = 1u * 4u;  // After NOPs.
  ArrayRef<const uint8_t> method1_code(method1_raw_code);
  ASSERT_EQ(bl_offset_in_method1 + 4u, method1_code.size());
  const uint32_t kExpectedLastMethodIdx = 65u;  // Based on 2MiB chunks in Create2MethodsWithGap().
  const LinkerPatch method1_patches[] = {
      LinkerPatch::RelativeCodePatch(bl_offset_in_method1, nullptr, kExpectedLastMethodIdx),
  };

  constexpr uint32_t max_positive_disp = 128 * MB - 4u;
  uint32_t last_method_idx = Create2MethodsWithGap(method1_code,
                                                   ArrayRef<const LinkerPatch>(method1_patches),
                                                   kNopCode,
                                                   ArrayRef<const LinkerPatch>(),
                                                   bl_offset_in_method1 + max_positive_disp);
  ASSERT_EQ(kExpectedLastMethodIdx, last_method_idx);

  uint32_t method1_offset = GetMethodOffset(1u);
  uint32_t last_method_offset = GetMethodOffset(last_method_idx);
  ASSERT_EQ(method1_offset + bl_offset_in_method1 + max_positive_disp, last_method_offset);

  // Check linked code.
  auto expected_code = GenNopsAndBl(1u, kBlPlusMax);
  EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));
}

TEST_F(Arm64RelativePatcherTestDefault, CallOtherAlmostTooFarBefore) {
  auto last_method_raw_code = GenNopsAndBl(0u, kBlPlus0);
  constexpr uint32_t bl_offset_in_last_method = 0u * 4u;  // After NOPs.
  ArrayRef<const uint8_t> last_method_code(last_method_raw_code);
  ASSERT_EQ(bl_offset_in_last_method + 4u, last_method_code.size());
  const LinkerPatch last_method_patches[] = {
      LinkerPatch::RelativeCodePatch(bl_offset_in_last_method, nullptr, 1u),
  };

  constexpr uint32_t max_negative_disp = 128 * MB;
  uint32_t last_method_idx = Create2MethodsWithGap(kNopCode,
                                                   ArrayRef<const LinkerPatch>(),
                                                   last_method_code,
                                                   ArrayRef<const LinkerPatch>(last_method_patches),
                                                   max_negative_disp - bl_offset_in_last_method);
  uint32_t method1_offset = GetMethodOffset(1u);
  uint32_t last_method_offset = GetMethodOffset(last_method_idx);
  ASSERT_EQ(method1_offset, last_method_offset + bl_offset_in_last_method - max_negative_disp);

  // Check linked code.
  auto expected_code = GenNopsAndBl(0u, kBlMinusMax);
  EXPECT_TRUE(CheckLinkedMethod(MethodRef(last_method_idx),
                                ArrayRef<const uint8_t>(expected_code)));
}

TEST_F(Arm64RelativePatcherTestDefault, CallOtherJustTooFarAfter) {
  auto method1_raw_code = GenNopsAndBl(0u, kBlPlus0);
  constexpr uint32_t bl_offset_in_method1 = 0u * 4u;  // After NOPs.
  ArrayRef<const uint8_t> method1_code(method1_raw_code);
  ASSERT_EQ(bl_offset_in_method1 + 4u, method1_code.size());
  const uint32_t kExpectedLastMethodIdx = 65u;  // Based on 2MiB chunks in Create2MethodsWithGap().
  const LinkerPatch method1_patches[] = {
      LinkerPatch::RelativeCodePatch(bl_offset_in_method1, nullptr, kExpectedLastMethodIdx),
  };

  constexpr uint32_t just_over_max_positive_disp = 128 * MB;
  uint32_t last_method_idx = Create2MethodsWithGap(
      method1_code,
      ArrayRef<const LinkerPatch>(method1_patches),
      kNopCode,
      ArrayRef<const LinkerPatch>(),
      bl_offset_in_method1 + just_over_max_positive_disp);
  ASSERT_EQ(kExpectedLastMethodIdx, last_method_idx);
  uint32_t method_after_thunk_idx = last_method_idx;
  if (sizeof(OatQuickMethodHeader) < kArm64Alignment) {
    // The thunk needs to start on a kArm64Alignment-aligned address before the address where the
    // last method would have been if there was no thunk. If the size of the OatQuickMethodHeader
    // is at least kArm64Alignment, the thunk start shall fit between the previous filler method
    // and that address. Otherwise, it shall be inserted before that filler method.
    method_after_thunk_idx -= 1u;
  }

  uint32_t method1_offset = GetMethodOffset(1u);
  uint32_t method_after_thunk_offset = GetMethodOffset(method_after_thunk_idx);
  ASSERT_TRUE(IsAligned<kArm64Alignment>(method_after_thunk_offset));
  uint32_t method_after_thunk_header_offset =
      method_after_thunk_offset - sizeof(OatQuickMethodHeader);
  uint32_t thunk_size = MethodCallThunkSize();
  uint32_t thunk_offset = RoundDown(method_after_thunk_header_offset - thunk_size, kArm64Alignment);
  DCHECK_EQ(thunk_offset + thunk_size + CodeAlignmentSize(thunk_offset + thunk_size),
            method_after_thunk_header_offset);
  ASSERT_TRUE(IsAligned<kArm64Alignment>(thunk_offset));
  uint32_t diff = thunk_offset - (method1_offset + bl_offset_in_method1);
  ASSERT_TRUE(IsAligned<4u>(diff));
  ASSERT_LT(diff, 128 * MB);
  auto expected_code = GenNopsAndBl(0u, kBlPlus0 | (diff >> 2));
  EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));
  CheckThunk(thunk_offset);
}

TEST_F(Arm64RelativePatcherTestDefault, CallOtherJustTooFarBefore) {
  auto last_method_raw_code = GenNopsAndBl(1u, kBlPlus0);
  constexpr uint32_t bl_offset_in_last_method = 1u * 4u;  // After NOPs.
  ArrayRef<const uint8_t> last_method_code(last_method_raw_code);
  ASSERT_EQ(bl_offset_in_last_method + 4u, last_method_code.size());
  const LinkerPatch last_method_patches[] = {
      LinkerPatch::RelativeCodePatch(bl_offset_in_last_method, nullptr, 1u),
  };

  constexpr uint32_t just_over_max_negative_disp = 128 * MB + 4;
  uint32_t last_method_idx = Create2MethodsWithGap(
      kNopCode, ArrayRef<const LinkerPatch>(), last_method_code,
      ArrayRef<const LinkerPatch>(last_method_patches),
      just_over_max_negative_disp - bl_offset_in_last_method);
  uint32_t method1_offset = GetMethodOffset(1u);
  uint32_t last_method_offset = GetMethodOffset(last_method_idx);
  ASSERT_EQ(method1_offset,
            last_method_offset + bl_offset_in_last_method - just_over_max_negative_disp);

  // Check linked code.
  uint32_t thunk_offset =
      CompiledCode::AlignCode(last_method_offset + last_method_code.size(), InstructionSet::kArm64);
  uint32_t diff = thunk_offset - (last_method_offset + bl_offset_in_last_method);
  ASSERT_TRUE(IsAligned<4u>(diff));
  ASSERT_LT(diff, 128 * MB);
  auto expected_code = GenNopsAndBl(1u, kBlPlus0 | (diff >> 2));
  EXPECT_TRUE(CheckLinkedMethod(MethodRef(last_method_idx),
                                ArrayRef<const uint8_t>(expected_code)));
  EXPECT_TRUE(CheckThunk(thunk_offset));
}

TEST_F(Arm64RelativePatcherTestDefault, StringBssEntry) {
  struct TestCase {
    uint32_t bss_begin;
    uint32_t string_entry_offset;
  };
  static const TestCase test_cases[] = {
      { 0x12345678u, 0x1234u },
      { -0x12345678u, 0x4444u },
      { 0x12345000u, 0x3ffcu },
      { 0x12345000u, 0x4000u }
  };
  for (const TestCase& test_case : test_cases) {
    Reset();
    TestNopsAdrpLdr(/*num_nops=*/ 0u, test_case.bss_begin, test_case.string_entry_offset);
  }
}

TEST_F(Arm64RelativePatcherTestDefault, StringReference) {
  for (uint32_t string_offset : { 0x12345678u, -0x12345678u, 0x12345000u, 0x12345ffcu }) {
    Reset();
    TestNopsAdrpAdd(/*num_nops=*/ 0u, string_offset);
  }
}

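// Run |test| for ADRP offsets just below and at a 4KiB page boundary. Offsets
// 0xff8u and 0xffcu are the cases where the "default" variant may need to
// insert a fixup thunk for the ADRP (the Cortex-A53 erratum 843419 workaround).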
template <typename Test>
void TestForAdrpOffsets(Test test, std::initializer_list<uint32_t> args) {
  for (uint32_t adrp_offset : { 0xff4u, 0xff8u, 0xffcu, 0x1000u }) {
    for (uint32_t arg : args) {
      test(adrp_offset, arg);
    }
  }
}

TEST_F(Arm64RelativePatcherTestDefault, StringBssEntryLdur) {
  TestForAdrpOffsets(
      [&](uint32_t adrp_offset, uint32_t string_entry_offset) {
        Reset();
        bool has_thunk = ((adrp_offset) == 0xff8u || (adrp_offset) == 0xffcu);
        TestAdrpLdurLdr(adrp_offset, has_thunk, /*bss_begin=*/ 0x12345678u, string_entry_offset);
      },
      { 0x1234u, 0x1238u });
}

// LDR <Wt>, <label> is always aligned. We should never have to use a fixup.
TEST_F(Arm64RelativePatcherTestDefault, StringBssEntryWPcRel) {
  TestForAdrpOffsets(
      [&](uint32_t adrp_offset, uint32_t pcrel_disp) {
        Reset();
        TestAdrpLdrPcRelLdr(kLdrWPcRelInsn,
                            pcrel_disp,
                            adrp_offset,
                            /*has_thunk=*/ false,
                            /*bss_begin=*/ 0x12345678u,
                            /*string_entry_offset=*/ 0x1234u);
      },
      { 0x1234u, 0x1238u });
}

// LDR <Xt>, <label> is aligned when offset + displacement is a multiple of 8.
TEST_F(Arm64RelativePatcherTestDefault, StringBssEntryXPcRel) {
  TestForAdrpOffsets(
      [&](uint32_t adrp_offset, uint32_t pcrel_disp) {
        Reset();
        bool unaligned = !IsAligned<8u>((adrp_offset) + 4u + static_cast<uint32_t>(pcrel_disp));
        bool has_thunk = ((adrp_offset) == 0xff8u || (adrp_offset) == 0xffcu) && unaligned;
        TestAdrpLdrPcRelLdr(kLdrXPcRelInsn,
                            pcrel_disp,
                            adrp_offset,
                            has_thunk,
                            /*bss_begin=*/ 0x12345678u,
                            /*string_entry_offset=*/ 0x1234u);
      },
      { 0x1234u, 0x1238u });
}

// LDR <Wt>, [SP, #<pimm>] and LDR <Xt>, [SP, #<pimm>] are always aligned. No fixup needed.
TEST_F(Arm64RelativePatcherTestDefault, StringBssEntryWSpRel) {
  TestForAdrpOffsets(
      [&](uint32_t adrp_offset, uint32_t disp) {
        Reset();
        TestAdrpLdrSpRelLdr(kLdrWSpRelInsn,
                            /*sprel_disp_in_load_units=*/ disp >> 2,
                            adrp_offset,
                            /*has_thunk=*/ false,
                            /*bss_begin=*/ 0x12345678u,
                            /*string_entry_offset=*/ 0x1234u);
      },
      { 0u, 4u });
}

TEST_F(Arm64RelativePatcherTestDefault, StringBssEntryXSpRel) {
  TestForAdrpOffsets(
      [&](uint32_t adrp_offset, uint32_t disp) {
        Reset();
        TestAdrpLdrSpRelLdr(kLdrXSpRelInsn,
                            /*sprel_disp_in_load_units=*/ (disp) >> 3,
                            adrp_offset,
                            /*has_thunk=*/ false,
                            /*bss_begin=*/ 0x12345678u,
                            /*string_entry_offset=*/ 0x1234u);
      },
      { 0u, 8u });
}

TEST_F(Arm64RelativePatcherTestDefault, StringReferenceLdur) {
  TestForAdrpOffsets(
      [&](uint32_t adrp_offset, uint32_t string_offset) {
        Reset();
        bool has_thunk = ((adrp_offset) == 0xff8u || (adrp_offset) == 0xffcu);
        TestAdrpLdurAdd(adrp_offset, has_thunk, string_offset);
      },
      { 0x12345678u, 0xffffc840u });
}

TEST_F(Arm64RelativePatcherTestDefault, StringReferenceSubX3X2) {
  TestForAdrpOffsets(
      [&](uint32_t adrp_offset, uint32_t string_offset) {
        Reset();
        /* SUB unrelated to "ADRP x0, addr". */
        uint32_t sub = kSubXInsn | (100 << 10) | (2u << 5) | 3u;  /* SUB x3, x2, #100 */
        TestAdrpInsn2Add(sub, adrp_offset, /*has_thunk=*/ false, string_offset);
      },
      { 0x12345678u, 0xffffc840u });
}

TEST_F(Arm64RelativePatcherTestDefault, StringReferenceSubsX3X0) {
  TestForAdrpOffsets(
      [&](uint32_t adrp_offset, uint32_t string_offset) {
        Reset();
        /* SUBS that uses the result of "ADRP x0, addr". */
        uint32_t subs = kSubsXInsn | (100 << 10) | (0u << 5) | 3u;  /* SUBS x3, x0, #100 */
        TestAdrpInsn2Add(subs, adrp_offset, /*has_thunk=*/ false, string_offset);
      },
      { 0x12345678u, 0xffffc840u });
}

TEST_F(Arm64RelativePatcherTestDefault, StringReferenceAddX0X0) {
  TestForAdrpOffsets(
      [&](uint32_t adrp_offset, uint32_t string_offset) {
        Reset();
        /* ADD that uses the result register of "ADRP x0, addr" as both source and destination. */
        uint32_t add = kAddXInsn | (100 << 10) | (0u << 5) | 0u;  /* ADD x0, x0, #100 */
        TestAdrpInsn2Add(add, adrp_offset, /*has_thunk=*/ false, string_offset);
      },
      { 0x12345678u, 0xffffc840u });
}

TEST_F(Arm64RelativePatcherTestDefault, StringReferenceAddsX0X2) {
  TestForAdrpOffsets(
      [&](uint32_t adrp_offset, uint32_t string_offset) {
        Reset();
        /* ADDS that does not use the result of "ADRP x0, addr" but overwrites that register. */
        uint32_t adds = kAddsXInsn | (100 << 10) | (2u << 5) | 0u;  /* ADDS x0, x2, #100 */
        bool has_thunk = ((adrp_offset) == 0xff8u || (adrp_offset) == 0xffcu);
        TestAdrpInsn2Add(adds, adrp_offset, has_thunk, string_offset);
      },
      { 0x12345678u, 0xffffc840u });
}

// LDR <Wt>, <label> is always aligned. We should never have to use a fixup.
TEST_F(Arm64RelativePatcherTestDefault, StringReferenceWPcRel) {
  TestForAdrpOffsets(
      [&](uint32_t adrp_offset, uint32_t pcrel_disp) {
        Reset();
        TestAdrpLdrPcRelAdd(kLdrWPcRelInsn,
                            pcrel_disp,
                            adrp_offset,
                            /*has_thunk=*/ false,
                            /*string_offset=*/ 0x12345678u);
      },
      { 0x1234u, 0x1238u });
}

// LDR <Xt>, <label> is aligned when offset + displacement is a multiple of 8.
TEST_F(Arm64RelativePatcherTestDefault, StringReferenceXPcRel) {
  TestForAdrpOffsets(
      [&](uint32_t adrp_offset, uint32_t pcrel_disp) {
        Reset();
        bool unaligned = !IsAligned<8u>((adrp_offset) + 4u + static_cast<uint32_t>(pcrel_disp));
        bool has_thunk = ((adrp_offset) == 0xff8u || (adrp_offset) == 0xffcu) && unaligned;
        TestAdrpLdrPcRelAdd(kLdrXPcRelInsn,
                            pcrel_disp,
                            adrp_offset,
                            has_thunk,
                            /*string_offset=*/ 0x12345678u);
      },
      { 0x1234u, 0x1238u });
}

// LDR <Wt>, [SP, #<pimm>] and LDR <Xt>, [SP, #<pimm>] are always aligned. No fixup needed.
TEST_F(Arm64RelativePatcherTestDefault, StringReferenceWSpRel) {
  TestForAdrpOffsets(
      [&](uint32_t adrp_offset, uint32_t disp) {
        Reset();
        TestAdrpLdrSpRelAdd(kLdrWSpRelInsn,
                            /*sprel_disp_in_load_units=*/ (disp) >> 2,
                            adrp_offset,
                            /*has_thunk=*/ false,
                            /*string_offset=*/ 0x12345678u);
      },
      { 0u, 4u });
}

TEST_F(Arm64RelativePatcherTestDefault, StringReferenceXSpRel) {
  TestForAdrpOffsets(
      [&](uint32_t adrp_offset, uint32_t disp) {
        Reset();
        TestAdrpLdrSpRelAdd(kLdrXSpRelInsn,
                            /*sprel_disp_in_load_units=*/ (disp) >> 3,
                            adrp_offset,
                            /*has_thunk=*/ false,
                            /*string_offset=*/ 0x12345678u);
      },
      { 0u, 8u });
}

TEST_F(Arm64RelativePatcherTestDefault, EntrypointCall) {
  constexpr uint32_t kEntrypointOffset = 512;
  const LinkerPatch patches[] = {
      LinkerPatch::CallEntrypointPatch(0u, kEntrypointOffset),
  };
  AddCompiledMethod(MethodRef(1u), kCallCode, ArrayRef<const LinkerPatch>(patches));
  Link();

  uint32_t method_offset = GetMethodOffset(1u);
  uint32_t thunk_offset = CompiledCode::AlignCode(method_offset + kCallCode.size(),
                                                  InstructionSet::kArm64);
  uint32_t diff = thunk_offset - method_offset;
  ASSERT_TRUE(IsAligned<4u>(diff));
  ASSERT_LT(diff, 128 * MB);
  auto expected_code = RawCode({kBlPlus0 | (diff >> 2)});
  EXPECT_TRUE(CheckLinkedMethod(MethodRef(1u), ArrayRef<const uint8_t>(expected_code)));

  // Verify the thunk.
  uint32_t ldr_ip0_tr_offset =
      0xf9400000 |                        // LDR Xt, [Xn, #<simm>]
      ((kEntrypointOffset >> 3) << 10) |  // imm12 = (simm >> scale), scale = 3
      (/* tr */ 19 << 5) |                // Xn = TR
      /* ip0 */ 16;                       // Xt = ip0
  uint32_t br_ip0 = 0xd61f0000 | (/* ip0 */ 16 << 5);
  auto expected_thunk = RawCode({ ldr_ip0_tr_offset, br_ip0 });
  ASSERT_LE(8u, output_.size() - thunk_offset);
  EXPECT_EQ(ldr_ip0_tr_offset, GetOutputInsn(thunk_offset));
  EXPECT_EQ(br_ip0, GetOutputInsn(thunk_offset + 4u));
}

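// Test Baker read barrier field loads: for every valid (base_reg, holder_reg)
// pair, add a method whose code is "CBNZ ip1, <thunk>; LDR <ref_reg>,
// [<base_reg>, #offset]", then verify both the patched CBNZ and the slow-path
// thunk emitted after the methods.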
Vladimir Marko66d691d2017-04-07 17:53:39 +01001039void Arm64RelativePatcherTest::TestBakerField(uint32_t offset, uint32_t ref_reg) {
Vladimir Markof4f2daa2017-03-20 18:26:59 +00001040 uint32_t valid_regs[] = {
1041 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
1042 10, 11, 12, 13, 14, 15, 18, 19, // IP0 and IP1 are reserved.
1043 20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
1044 // LR and SP/ZR are reserved.
1045 };
1046 DCHECK_ALIGNED(offset, 4u);
1047 DCHECK_LT(offset, 16 * KB);
1048 constexpr size_t kMethodCodeSize = 8u;
1049 constexpr size_t kLiteralOffset = 0u;
1050 uint32_t method_idx = 0u;
1051 for (uint32_t base_reg : valid_regs) {
1052 for (uint32_t holder_reg : valid_regs) {
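      // LDR (immediate, unsigned offset) keeps imm12 at bit 10, scaled by the access
      // size; for a 32-bit load the byte offset is divided by 4, hence `<< (10 - 2)`.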
      uint32_t ldr = kLdrWInsn | (offset << (10 - 2)) | (base_reg << 5) | ref_reg;
      const std::vector<uint8_t> raw_code = RawCode({kCbnzIP1Plus0Insn, ldr});
      ASSERT_EQ(kMethodCodeSize, raw_code.size());
      ArrayRef<const uint8_t> code(raw_code);
      uint32_t encoded_data = EncodeBakerReadBarrierFieldData(base_reg, holder_reg);
      const LinkerPatch patches[] = {
          LinkerPatch::BakerReadBarrierBranchPatch(kLiteralOffset, encoded_data),
      };
      ++method_idx;
      AddCompiledMethod(MethodRef(method_idx), code, ArrayRef<const LinkerPatch>(patches));
    }
  }
  Link();

  // All thunks are at the end.
  uint32_t thunk_offset = GetMethodOffset(method_idx) + RoundUp(kMethodCodeSize, kArm64Alignment);
  method_idx = 0u;
  for (uint32_t base_reg : valid_regs) {
    for (uint32_t holder_reg : valid_regs) {
      ++method_idx;
      uint32_t cbnz_offset = thunk_offset - (GetMethodOffset(method_idx) + kLiteralOffset);
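      // CBNZ keeps its imm19 displacement at bit 5, in 4-byte units, hence `<< (5 - 2)`.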
      uint32_t cbnz = kCbnzIP1Plus0Insn | (cbnz_offset << (5 - 2));
      uint32_t ldr = kLdrWInsn | (offset << (10 - 2)) | (base_reg << 5) | ref_reg;
      const std::vector<uint8_t> expected_code = RawCode({cbnz, ldr});
      ASSERT_EQ(kMethodCodeSize, expected_code.size());
      ASSERT_TRUE(
          CheckLinkedMethod(MethodRef(method_idx), ArrayRef<const uint8_t>(expected_code)));

      std::vector<uint8_t> expected_thunk = CompileBakerOffsetThunk(base_reg, holder_reg);
      ASSERT_GT(output_.size(), thunk_offset);
      ASSERT_GE(output_.size() - thunk_offset, expected_thunk.size());
      ArrayRef<const uint8_t> compiled_thunk(output_.data() + thunk_offset,
                                             expected_thunk.size());
      if (ArrayRef<const uint8_t>(expected_thunk) != compiled_thunk) {
        DumpDiff(ArrayRef<const uint8_t>(expected_thunk), compiled_thunk);
        ASSERT_TRUE(false);
      }

      size_t gray_check_offset = thunk_offset;
      if (holder_reg == base_reg) {
        // Verify that the null-check CBZ uses the correct register, i.e. holder_reg.
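        // 0x34000000 is CBZ <Wt>, <label>; the 0xff00001f mask keeps the opcode and Rt
        // fields and ignores the imm19 branch displacement.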
        ASSERT_GE(output_.size() - gray_check_offset, 4u);
        ASSERT_EQ(0x34000000u | holder_reg, GetOutputInsn(thunk_offset) & 0xff00001fu);
        gray_check_offset += 4u;
      }
      // Verify that the lock word for gray bit check is loaded from the holder address.
      static constexpr size_t kGrayCheckInsns = 5;
      ASSERT_GE(output_.size() - gray_check_offset, 4u * kGrayCheckInsns);
      const uint32_t load_lock_word =
          kLdrWInsn |
          (mirror::Object::MonitorOffset().Uint32Value() << (10 - 2)) |
          (holder_reg << 5) |
          /* ip0 */ 16;
      EXPECT_EQ(load_lock_word, GetOutputInsn(gray_check_offset));
      // Verify the gray bit check.
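      // TBNZ encodes the tested bit number in bits 19..23 and its imm14 displacement in
      // bits 5..18; the 0xfff8001f mask below ignores the displacement, checking only
      // the opcode, the read barrier state bit and the register (IP0).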
      const uint32_t check_gray_bit_without_offset =
          0x37000000u | (LockWord::kReadBarrierStateShift << 19) | /* ip0 */ 16;
      EXPECT_EQ(check_gray_bit_without_offset, GetOutputInsn(gray_check_offset + 4u) & 0xfff8001fu);
      // Verify the fake dependency.
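      // (Editor's note: IP0 holds the 32-bit lock word just loaded, so `IP0 LSR #32` is
      // zero and the ADD leaves base_reg unchanged; it merely makes the reference load
      // address-depend on the lock word load, ordering the two loads without a barrier.)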
      const uint32_t fake_dependency =
          0x8b408000u |           // ADD Xd, Xn, Xm, LSR 32
          (/* ip0 */ 16 << 16) |  // Xm = ip0
          (base_reg << 5) |       // Xn = base_reg
          base_reg;               // Xd = base_reg
      EXPECT_EQ(fake_dependency, GetOutputInsn(gray_check_offset + 12u));
      // Do not check the rest of the implementation.

      // The next thunk follows on the next aligned offset.
      thunk_offset += RoundUp(expected_thunk.size(), kArm64Alignment);
    }
  }
}

TEST_F(Arm64RelativePatcherTestDefault, BakerOffset) {
  struct TestCase {
    uint32_t offset;
    uint32_t ref_reg;
  };
  static const TestCase test_cases[] = {
      { 0u, 0u },
      { 8u, 15u },
      { 0x3ffcu, 29u },
  };
  for (const TestCase& test_case : test_cases) {
    Reset();
    TestBakerField(test_case.offset, test_case.ref_reg);
  }
}

TEST_F(Arm64RelativePatcherTestDefault, BakerOffsetThunkInTheMiddle) {
  // One thunk in the middle with maximum distance branches to it from both sides.
  // Use offset = 0, base_reg = 0, ref_reg = 0, the LDR is simply `kLdrWInsn`.
  constexpr uint32_t kLiteralOffset1 = 4;
  const std::vector<uint8_t> raw_code1 = RawCode({kNopInsn, kCbnzIP1Plus0Insn, kLdrWInsn});
  ArrayRef<const uint8_t> code1(raw_code1);
  uint32_t encoded_data = EncodeBakerReadBarrierFieldData(/* base_reg */ 0, /* holder_reg */ 0);
  const LinkerPatch patches1[] = {
      LinkerPatch::BakerReadBarrierBranchPatch(kLiteralOffset1, encoded_data),
  };
  AddCompiledMethod(MethodRef(1u), code1, ArrayRef<const LinkerPatch>(patches1));

  // Allow thunk at 1MiB offset from the start of the method above. Literal offset being 4
  // allows the branch to reach that thunk.
  size_t filler1_size =
      1 * MB - RoundUp(raw_code1.size() + sizeof(OatQuickMethodHeader), kArm64Alignment);
  std::vector<uint8_t> raw_filler1_code = GenNops(filler1_size / 4u);
  ArrayRef<const uint8_t> filler1_code(raw_filler1_code);
  AddCompiledMethod(MethodRef(2u), filler1_code);

  // Enforce thunk reservation with a tiny method.
  AddCompiledMethod(MethodRef(3u), kNopCode);

  // Allow reaching the thunk from the very beginning of a method 1MiB away. Backward branch
  // reaches the full 1MiB. Things to subtract:
  //   - thunk size and method 3 pre-header, rounded up (padding in between if needed),
  //   - method 3 code and method 4 pre-header, rounded up (padding in between if needed),
  //   - method 4 header (let there be no padding between method 4 code and method 5 pre-header).
  size_t thunk_size = CompileBakerOffsetThunk(/* base_reg */ 0, /* holder_reg */ 0).size();
  size_t filler2_size =
      1 * MB - RoundUp(thunk_size + sizeof(OatQuickMethodHeader), kArm64Alignment)
             - RoundUp(kNopCode.size() + sizeof(OatQuickMethodHeader), kArm64Alignment)
             - sizeof(OatQuickMethodHeader);
  std::vector<uint8_t> raw_filler2_code = GenNops(filler2_size / 4u);
  ArrayRef<const uint8_t> filler2_code(raw_filler2_code);
  AddCompiledMethod(MethodRef(4u), filler2_code);

  constexpr uint32_t kLiteralOffset2 = 0;
  const std::vector<uint8_t> raw_code2 = RawCode({kCbnzIP1Plus0Insn, kLdrWInsn});
  ArrayRef<const uint8_t> code2(raw_code2);
  const LinkerPatch patches2[] = {
      LinkerPatch::BakerReadBarrierBranchPatch(kLiteralOffset2, encoded_data),
  };
  AddCompiledMethod(MethodRef(5u), code2, ArrayRef<const LinkerPatch>(patches2));

  Link();

  uint32_t first_method_offset = GetMethodOffset(1u);
  uint32_t last_method_offset = GetMethodOffset(5u);
  EXPECT_EQ(2 * MB, last_method_offset - first_method_offset);

  const uint32_t cbnz_max_forward = kCbnzIP1Plus0Insn | 0x007fffe0;
  const uint32_t cbnz_max_backward = kCbnzIP1Plus0Insn | 0x00800000;
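  // Editor's note: imm19 == 0x3ffff is the maximum forward displacement, 1MiB - 4, and
  // imm19 == 0x40000 (the sign bit) is the maximum backward displacement, -1MiB.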
  const std::vector<uint8_t> expected_code1 = RawCode({kNopInsn, cbnz_max_forward, kLdrWInsn});
  const std::vector<uint8_t> expected_code2 = RawCode({cbnz_max_backward, kLdrWInsn});
  ASSERT_TRUE(CheckLinkedMethod(MethodRef(1), ArrayRef<const uint8_t>(expected_code1)));
  ASSERT_TRUE(CheckLinkedMethod(MethodRef(5), ArrayRef<const uint8_t>(expected_code2)));
}

TEST_F(Arm64RelativePatcherTestDefault, BakerOffsetThunkBeforeFiller) {
  // Based on the first part of BakerOffsetThunkInTheMiddle but the CBNZ is one instruction
  // earlier, so the thunk is emitted before the filler.
  // Use offset = 0, base_reg = 0, ref_reg = 0, the LDR is simply `kLdrWInsn`.
  constexpr uint32_t kLiteralOffset1 = 0;
  const std::vector<uint8_t> raw_code1 = RawCode({kCbnzIP1Plus0Insn, kLdrWInsn, kNopInsn});
  ArrayRef<const uint8_t> code1(raw_code1);
  uint32_t encoded_data = EncodeBakerReadBarrierFieldData(/* base_reg */ 0, /* holder_reg */ 0);
  const LinkerPatch patches1[] = {
      LinkerPatch::BakerReadBarrierBranchPatch(kLiteralOffset1, encoded_data),
  };
  AddCompiledMethod(MethodRef(1u), code1, ArrayRef<const LinkerPatch>(patches1));

  // Request a filler that would put a thunk at 1MiB offset from the start of the method
  // above. With the CBNZ at literal offset 0, a thunk 1MiB away is out of range, so the
  // thunk must be emitted before the filler instead.
  size_t filler1_size =
      1 * MB - RoundUp(raw_code1.size() + sizeof(OatQuickMethodHeader), kArm64Alignment);
  std::vector<uint8_t> raw_filler1_code = GenNops(filler1_size / 4u);
  ArrayRef<const uint8_t> filler1_code(raw_filler1_code);
  AddCompiledMethod(MethodRef(2u), filler1_code);

  Link();

  const uint32_t cbnz_offset = RoundUp(raw_code1.size(), kArm64Alignment) - kLiteralOffset1;
  const uint32_t cbnz = kCbnzIP1Plus0Insn | (cbnz_offset << (5 - 2));
  const std::vector<uint8_t> expected_code1 = RawCode({cbnz, kLdrWInsn, kNopInsn});
  ASSERT_TRUE(CheckLinkedMethod(MethodRef(1), ArrayRef<const uint8_t>(expected_code1)));
}

TEST_F(Arm64RelativePatcherTestDefault, BakerOffsetThunkInTheMiddleUnreachableFromLast) {
  // Based on the BakerOffsetThunkInTheMiddle but the CBNZ in the last method is preceded
  // by NOP and cannot reach the thunk in the middle, so we emit an extra thunk at the end.
  // Use offset = 0, base_reg = 0, ref_reg = 0, the LDR is simply `kLdrWInsn`.
  constexpr uint32_t kLiteralOffset1 = 4;
  const std::vector<uint8_t> raw_code1 = RawCode({kNopInsn, kCbnzIP1Plus0Insn, kLdrWInsn});
  ArrayRef<const uint8_t> code1(raw_code1);
  uint32_t encoded_data = EncodeBakerReadBarrierFieldData(/* base_reg */ 0, /* holder_reg */ 0);
  const LinkerPatch patches1[] = {
      LinkerPatch::BakerReadBarrierBranchPatch(kLiteralOffset1, encoded_data),
  };
  AddCompiledMethod(MethodRef(1u), code1, ArrayRef<const LinkerPatch>(patches1));

  // Allow thunk at 1MiB offset from the start of the method above. Literal offset being 4
  // allows the branch to reach that thunk.
  size_t filler1_size =
      1 * MB - RoundUp(raw_code1.size() + sizeof(OatQuickMethodHeader), kArm64Alignment);
  std::vector<uint8_t> raw_filler1_code = GenNops(filler1_size / 4u);
  ArrayRef<const uint8_t> filler1_code(raw_filler1_code);
  AddCompiledMethod(MethodRef(2u), filler1_code);

  // Enforce thunk reservation with a tiny method.
  AddCompiledMethod(MethodRef(3u), kNopCode);

  // If not for the extra NOP, this would allow reaching the thunk from the very beginning
  // of a method 1MiB away. Backward branch reaches the full 1MiB. Things to subtract:
  //   - thunk size and method 3 pre-header, rounded up (padding in between if needed),
  //   - method 3 code and method 4 pre-header, rounded up (padding in between if needed),
  //   - method 4 header (let there be no padding between method 4 code and method 5 pre-header).
  size_t thunk_size = CompileBakerOffsetThunk(/* base_reg */ 0, /* holder_reg */ 0).size();
  size_t filler2_size =
      1 * MB - RoundUp(thunk_size + sizeof(OatQuickMethodHeader), kArm64Alignment)
             - RoundUp(kNopCode.size() + sizeof(OatQuickMethodHeader), kArm64Alignment)
             - sizeof(OatQuickMethodHeader);
  std::vector<uint8_t> raw_filler2_code = GenNops(filler2_size / 4u);
  ArrayRef<const uint8_t> filler2_code(raw_filler2_code);
  AddCompiledMethod(MethodRef(4u), filler2_code);

  // Extra NOP compared to BakerOffsetThunkInTheMiddle.
  constexpr uint32_t kLiteralOffset2 = 4;
  const std::vector<uint8_t> raw_code2 = RawCode({kNopInsn, kCbnzIP1Plus0Insn, kLdrWInsn});
  ArrayRef<const uint8_t> code2(raw_code2);
  const LinkerPatch patches2[] = {
      LinkerPatch::BakerReadBarrierBranchPatch(kLiteralOffset2, encoded_data),
  };
  AddCompiledMethod(MethodRef(5u), code2, ArrayRef<const LinkerPatch>(patches2));

  Link();

  const uint32_t cbnz_max_forward = kCbnzIP1Plus0Insn | 0x007fffe0;
  const uint32_t cbnz_last_offset = RoundUp(raw_code2.size(), kArm64Alignment) - kLiteralOffset2;
  const uint32_t cbnz_last = kCbnzIP1Plus0Insn | (cbnz_last_offset << (5 - 2));
  const std::vector<uint8_t> expected_code1 = RawCode({kNopInsn, cbnz_max_forward, kLdrWInsn});
  const std::vector<uint8_t> expected_code2 = RawCode({kNopInsn, cbnz_last, kLdrWInsn});
  ASSERT_TRUE(CheckLinkedMethod(MethodRef(1), ArrayRef<const uint8_t>(expected_code1)));
  ASSERT_TRUE(CheckLinkedMethod(MethodRef(5), ArrayRef<const uint8_t>(expected_code2)));
}

TEST_F(Arm64RelativePatcherTestDefault, BakerArray) {
  uint32_t valid_regs[] = {
      0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
      10, 11, 12, 13, 14, 15, 18, 19,  // IP0 and IP1 are reserved.
      20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
      // LR and SP/ZR are reserved.
  };
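  // In kLdrWLsl2Insn the index register Rm sits at bits 16..20, the base Rn at bits
  // 5..9 and the destination Rt at bits 0..4; the lambda below merely picks an index
  // and a reference register distinct from base_reg (and from each other).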
  auto ldr = [](uint32_t base_reg) {
    uint32_t index_reg = (base_reg == 0u) ? 1u : 0u;
    uint32_t ref_reg = (base_reg == 2u) ? 3u : 2u;
    return kLdrWLsl2Insn | (index_reg << 16) | (base_reg << 5) | ref_reg;
  };
  constexpr size_t kMethodCodeSize = 8u;
  constexpr size_t kLiteralOffset = 0u;
  uint32_t method_idx = 0u;
  for (uint32_t base_reg : valid_regs) {
    ++method_idx;
    const std::vector<uint8_t> raw_code = RawCode({kCbnzIP1Plus0Insn, ldr(base_reg)});
    ASSERT_EQ(kMethodCodeSize, raw_code.size());
    ArrayRef<const uint8_t> code(raw_code);
    const LinkerPatch patches[] = {
        LinkerPatch::BakerReadBarrierBranchPatch(
            kLiteralOffset, EncodeBakerReadBarrierArrayData(base_reg)),
    };
    AddCompiledMethod(MethodRef(method_idx), code, ArrayRef<const LinkerPatch>(patches));
  }
  Link();

  // All thunks are at the end.
  uint32_t thunk_offset = GetMethodOffset(method_idx) + RoundUp(kMethodCodeSize, kArm64Alignment);
  method_idx = 0u;
  for (uint32_t base_reg : valid_regs) {
    ++method_idx;
    uint32_t cbnz_offset = thunk_offset - (GetMethodOffset(method_idx) + kLiteralOffset);
    uint32_t cbnz = kCbnzIP1Plus0Insn | (cbnz_offset << (5 - 2));
    const std::vector<uint8_t> expected_code = RawCode({cbnz, ldr(base_reg)});
    ASSERT_EQ(kMethodCodeSize, expected_code.size());
    EXPECT_TRUE(CheckLinkedMethod(MethodRef(method_idx), ArrayRef<const uint8_t>(expected_code)));

    std::vector<uint8_t> expected_thunk = CompileBakerArrayThunk(base_reg);
    ASSERT_GT(output_.size(), thunk_offset);
    ASSERT_GE(output_.size() - thunk_offset, expected_thunk.size());
    ArrayRef<const uint8_t> compiled_thunk(output_.data() + thunk_offset,
                                           expected_thunk.size());
    if (ArrayRef<const uint8_t>(expected_thunk) != compiled_thunk) {
      DumpDiff(ArrayRef<const uint8_t>(expected_thunk), compiled_thunk);
      ASSERT_TRUE(false);
    }

    // Verify that the lock word for the gray bit check is loaded from the correct
    // address: base_reg points at the array data, so the monitor lies at a negative
    // offset from it.
    static constexpr size_t kGrayCheckInsns = 5;
    ASSERT_GE(output_.size() - thunk_offset, 4u * kGrayCheckInsns);
    int32_t data_offset =
        mirror::Array::DataOffset(Primitive::ComponentSize(Primitive::kPrimNot)).Int32Value();
    int32_t offset = mirror::Object::MonitorOffset().Int32Value() - data_offset;
    ASSERT_LT(offset, 0);
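    // LDUR takes a signed 9-bit byte offset (imm9) at bits 12..20; masking the negative
    // offset with 0x1ff keeps its low nine two's-complement bits.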
    const uint32_t load_lock_word =
        kLdurWInsn |
        ((offset & 0x1ffu) << 12) |
        (base_reg << 5) |
        /* ip0 */ 16;
    EXPECT_EQ(load_lock_word, GetOutputInsn(thunk_offset));
    // Verify the gray bit check.
    const uint32_t check_gray_bit_without_offset =
        0x37000000u | (LockWord::kReadBarrierStateShift << 19) | /* ip0 */ 16;
    EXPECT_EQ(check_gray_bit_without_offset, GetOutputInsn(thunk_offset + 4u) & 0xfff8001fu);
    // Verify the fake dependency.
    const uint32_t fake_dependency =
        0x8b408000u |           // ADD Xd, Xn, Xm, LSR 32
        (/* ip0 */ 16 << 16) |  // Xm = ip0
        (base_reg << 5) |       // Xn = base_reg
        base_reg;               // Xd = base_reg
    EXPECT_EQ(fake_dependency, GetOutputInsn(thunk_offset + 12u));
    // Do not check the rest of the implementation.

    // The next thunk follows on the next aligned offset.
    thunk_offset += RoundUp(expected_thunk.size(), kArm64Alignment);
  }
}

TEST_F(Arm64RelativePatcherTestDefault, BakerGcRoot) {
  uint32_t valid_regs[] = {
      0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
      10, 11, 12, 13, 14, 15, 18, 19,  // IP0 and IP1 are reserved.
      20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
      // LR and SP/ZR are reserved.
  };
  constexpr size_t kMethodCodeSize = 8u;
  constexpr size_t kLiteralOffset = 4u;
  uint32_t method_idx = 0u;
  for (uint32_t root_reg : valid_regs) {
    ++method_idx;
    uint32_t ldr = kLdrWInsn | (/* offset */ 8 << (10 - 2)) | (/* base_reg */ 0 << 5) | root_reg;
    const std::vector<uint8_t> raw_code = RawCode({ldr, kCbnzIP1Plus0Insn});
    ASSERT_EQ(kMethodCodeSize, raw_code.size());
    ArrayRef<const uint8_t> code(raw_code);
    const LinkerPatch patches[] = {
        LinkerPatch::BakerReadBarrierBranchPatch(
            kLiteralOffset, EncodeBakerReadBarrierGcRootData(root_reg)),
    };
    AddCompiledMethod(MethodRef(method_idx), code, ArrayRef<const LinkerPatch>(patches));
  }
  Link();

  // All thunks are at the end.
  uint32_t thunk_offset = GetMethodOffset(method_idx) + RoundUp(kMethodCodeSize, kArm64Alignment);
  method_idx = 0u;
  for (uint32_t root_reg : valid_regs) {
    ++method_idx;
    uint32_t cbnz_offset = thunk_offset - (GetMethodOffset(method_idx) + kLiteralOffset);
    uint32_t cbnz = kCbnzIP1Plus0Insn | (cbnz_offset << (5 - 2));
    uint32_t ldr = kLdrWInsn | (/* offset */ 8 << (10 - 2)) | (/* base_reg */ 0 << 5) | root_reg;
    const std::vector<uint8_t> expected_code = RawCode({ldr, cbnz});
    ASSERT_EQ(kMethodCodeSize, expected_code.size());
    EXPECT_TRUE(CheckLinkedMethod(MethodRef(method_idx), ArrayRef<const uint8_t>(expected_code)));

    std::vector<uint8_t> expected_thunk = CompileBakerGcRootThunk(root_reg);
    ASSERT_GT(output_.size(), thunk_offset);
    ASSERT_GE(output_.size() - thunk_offset, expected_thunk.size());
    ArrayRef<const uint8_t> compiled_thunk(output_.data() + thunk_offset,
                                           expected_thunk.size());
    if (ArrayRef<const uint8_t>(expected_thunk) != compiled_thunk) {
      DumpDiff(ArrayRef<const uint8_t>(expected_thunk), compiled_thunk);
      ASSERT_TRUE(false);
    }

    // Verify that the fast-path null-check CBZ uses the correct register, i.e. root_reg.
    ASSERT_GE(output_.size() - thunk_offset, 4u);
    ASSERT_EQ(0x34000000u | root_reg, GetOutputInsn(thunk_offset) & 0xff00001fu);
    // Do not check the rest of the implementation.

    // The next thunk follows on the next aligned offset.
    thunk_offset += RoundUp(expected_thunk.size(), kArm64Alignment);
  }
}

TEST_F(Arm64RelativePatcherTestDefault, BakerAndMethodCallInteraction) {
  // During development, there was a `DCHECK_LE(MaxNextOffset(), next_thunk.MaxNextOffset());`
  // in `ArmBaseRelativePatcher::ThunkData::MakeSpaceBefore()` which does not necessarily
  // hold when we're reserving thunks of different sizes. This test exposes the situation
  // by using Baker thunks and a method call thunk.

  // Add a method call patch that can reach up to method 1 offset + 128MiB.
  uint32_t method_idx = 0u;
  constexpr size_t kMethodCallLiteralOffset = 4u;
  constexpr uint32_t kMissingMethodIdx = 2u;
  const std::vector<uint8_t> raw_code1 = RawCode({kNopInsn, kBlPlus0});
  const LinkerPatch method1_patches[] = {
      LinkerPatch::RelativeCodePatch(kMethodCallLiteralOffset, nullptr, 2u),
  };
  ArrayRef<const uint8_t> code1(raw_code1);
  ++method_idx;
  AddCompiledMethod(MethodRef(1u), code1, ArrayRef<const LinkerPatch>(method1_patches));

  // Skip kMissingMethodIdx.
  ++method_idx;
  ASSERT_EQ(kMissingMethodIdx, method_idx);
  // Add a filler method sized so that the code for the next method starts 1MiB after
  // the code for method 1.
  size_t filler_size =
      1 * MB - RoundUp(raw_code1.size() + sizeof(OatQuickMethodHeader), kArm64Alignment)
             - sizeof(OatQuickMethodHeader);
  std::vector<uint8_t> filler_code = GenNops(filler_size / 4u);
  ++method_idx;
  AddCompiledMethod(MethodRef(method_idx), ArrayRef<const uint8_t>(filler_code));
  // Add 126 methods with 1MiB code+header, making the code for the next method start 1MiB
  // before the currently scheduled MaxNextOffset() for the method call thunk.
  for (uint32_t i = 0; i != 126; ++i) {
    filler_size = 1 * MB - sizeof(OatQuickMethodHeader);
    filler_code = GenNops(filler_size / 4u);
    ++method_idx;
    AddCompiledMethod(MethodRef(method_idx), ArrayRef<const uint8_t>(filler_code));
  }

  // Add 2 Baker GC root patches to the last method: one that would allow the thunk at
  // 1MiB + kArm64Alignment, i.e. kArm64Alignment after the method call thunk, and a
  // second that needs it kArm64Alignment after that. Since the GC root thunk is larger
  // than the space required by the method call thunk plus kArm64Alignment, this pushes
  // the first GC root thunk's pending MaxNextOffset() before the method call thunk's
  // pending MaxNextOffset(), which then needs to be adjusted.
  ASSERT_LT(RoundUp(CompileMethodCallThunk().size(), kArm64Alignment) + kArm64Alignment,
            CompileBakerGcRootThunk(/* root_reg */ 0).size());
  static_assert(kArm64Alignment == 16, "Code below assumes kArm64Alignment == 16");
  constexpr size_t kBakerLiteralOffset1 = 4u + kArm64Alignment;
  constexpr size_t kBakerLiteralOffset2 = 4u + 2 * kArm64Alignment;
  // Use offset = 0, base_reg = 0, the LDR is simply `kLdrWInsn | root_reg`.
  const uint32_t ldr1 = kLdrWInsn | /* root_reg */ 1;
  const uint32_t ldr2 = kLdrWInsn | /* root_reg */ 2;
  const std::vector<uint8_t> last_method_raw_code = RawCode({
      kNopInsn, kNopInsn, kNopInsn, kNopInsn,  // Padding before first GC root read barrier.
      ldr1, kCbnzIP1Plus0Insn,                 // First GC root LDR with read barrier.
      kNopInsn, kNopInsn,                      // Padding before second GC root read barrier.
      ldr2, kCbnzIP1Plus0Insn,                 // Second GC root LDR with read barrier.
  });
  uint32_t encoded_data1 = EncodeBakerReadBarrierGcRootData(/* root_reg */ 1);
  uint32_t encoded_data2 = EncodeBakerReadBarrierGcRootData(/* root_reg */ 2);
  const LinkerPatch last_method_patches[] = {
      LinkerPatch::BakerReadBarrierBranchPatch(kBakerLiteralOffset1, encoded_data1),
      LinkerPatch::BakerReadBarrierBranchPatch(kBakerLiteralOffset2, encoded_data2),
  };
  ++method_idx;
  AddCompiledMethod(MethodRef(method_idx),
                    ArrayRef<const uint8_t>(last_method_raw_code),
                    ArrayRef<const LinkerPatch>(last_method_patches));

  // The main purpose of the test is to check that Link() does not cause a crash.
  Link();

  ASSERT_EQ(127 * MB, GetMethodOffset(method_idx) - GetMethodOffset(1u));
}

}  // namespace linker
}  // namespace art
1502} // namespace art