blob: c9774a7d1bd61b913915170662ba30e84db0a3d2 [file] [log] [blame]
Andreas Gampe525cde22014-04-22 15:44:50 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Ian Rogerse63db272014-07-15 15:36:11 -070017#include <cstdio>
18
Mathieu Chartierc7853442015-03-27 14:35:38 -070019#include "art_field-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070020#include "art_method-inl.h"
Andreas Gampe8228cdf2017-05-30 15:03:54 -070021#include "base/callee_save_type.h"
Andreas Gampe542451c2016-07-26 09:02:02 -070022#include "base/enums.h"
Vladimir Marko3481ba22015-04-13 12:22:36 +010023#include "class_linker-inl.h"
Vladimir Markoacb906d2018-05-30 10:23:49 +010024#include "class_root.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070025#include "common_runtime_test.h"
Andreas Gampe29b38412014-08-13 00:15:43 -070026#include "entrypoints/quick/quick_entrypoints_enum.h"
Andreas Gampe75a7db62016-09-26 12:04:26 -070027#include "imt_conflict_table.h"
Vladimir Markoa3ad0cd2018-05-04 10:06:38 +010028#include "jni/jni_internal.h"
Nicolas Geoffray1004faa2016-03-23 14:28:30 +000029#include "linear_alloc.h"
Andreas Gampe70f5fd02018-10-24 19:58:37 -070030#include "mirror/class-alloc-inl.h"
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -070031#include "mirror/string-inl.h"
Andreas Gampe52ecb652018-10-24 15:18:21 -070032#include "mirror/object_array-alloc-inl.h"
Mathieu Chartier0795f232016-09-27 18:43:30 -070033#include "scoped_thread_state_change-inl.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070034
35namespace art {
36
37
// Test fixture for exercising the quick (assembly) entrypoint stubs directly.
// Provides Invoke3* helpers that hand-roll a managed-code transition frame and
// jump into a stub with a given argument triple, per supported architecture.
class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() override {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      for (uint32_t i = 0; i < static_cast<uint32_t>(CalleeSaveType::kLastCalleeSaveType); ++i) {
        CalleeSaveType type = CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(), type);
        }
      }
    }
  }

  // Shrink the heap and force the interpreter so the stubs under test are the
  // only compiled code involved.
  void SetUpRuntimeOptions(RuntimeOptions *options) override {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  // Invoke a stub at `code` with three word-sized arguments and no referrer.
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // As Invoke3, but places `referrer` where a quick frame expects the calling
  // ArtMethod.
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             ArtMethod* referrer) {
    return Invoke3WithReferrerAndHidden(arg0, arg1, arg2, code, self, referrer, 0);
  }

  // Core trampoline: per-architecture inline assembly that spills caller state,
  // loads the quick calling convention registers (args, code pointer, self,
  // hidden argument), calls the stub, and recovers the word-sized result.
  // On unsupported architectures this logs a warning and returns 0.
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
#define PUSH(reg) "push " # reg "\n\t .cfi_adjust_cfa_offset 4\n\t"
#define POP(reg) "pop " # reg "\n\t .cfi_adjust_cfa_offset -4\n\t"
    __asm__ __volatile__(
        "movd %[hidden], %%xmm7\n\t"  // This is a memory op, so do this early. If it is off of
                                      // esp, then we won't be able to access it after spilling.

        // Spill 6 registers.
        PUSH(%%ebx)
        PUSH(%%ecx)
        PUSH(%%edx)
        PUSH(%%esi)
        PUSH(%%edi)
        PUSH(%%ebp)

        // Store the inputs to the stack, but keep the referrer up top, less work.
        PUSH(%[referrer])  // Align stack.
        PUSH(%[referrer])  // Store referrer

        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[code])
        // Now read them back into the required registers.
        POP(%%edi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%eax)
        // Call is prepared now.

        "call *%%edi\n\t"        // Call the stub
        "addl $8, %%esp\n\t"     // Pop referrer and padding.
        ".cfi_adjust_cfa_offset -8\n\t"

        // Restore 6 registers.
        POP(%%ebp)
        POP(%%edi)
        POP(%%esi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%ebx)

        : "=a" (result)
        // Use the result from eax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer]"r"(referrer), [hidden]"m"(hidden)
        // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory", "xmm7");  // clobber.
#undef PUSH
#undef POP
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"  // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"  // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"             // Call the stub
        "add sp, sp, #12\n\t"    // Pop null and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"   // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"  // Save the result
        : [result] "=r" (result)
        // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "r0", "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #80\n\t"
        ".cfi_adjust_cfa_offset 80\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"
        // To be extra defensive, store x20,x21. We do this because some of the stubs might make a
        // transition into the runtime via the blr instruction below and *not* save x20.
        "stp x20, x21, [sp, #64]\n\t"

        "sub sp, sp, #16\n\t"        // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"  // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x19, x17, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"           // Call the stub
        "mov x8, x0\n\t"       // Store result
        "add sp, sp, #16\n\t"  // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"  // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"  // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "ldp x20, x21, [sp, #64]\n\t"
        "add sp, sp, #80\n\t"  // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -80\n\t"

        "str x9, %[fpr_result]\n\t"  // Store the FPR comparison result
        "mov %[result], x8\n\t"      // Store the call result

        "b 3f\n\t"  // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"  // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
        // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
        // X18 is a reserved register, cannot be clobbered.
        // Leave one register unclobbered, which is needed for compiling with
        // -fstack-protector-strong. According to AAPCS64 registers x9-x15 are caller-saved,
        // which means we should unclobber one of the callee-saved registers that are unused.
        // Here we use x20.
        // http://b/72613441, Clang 7.0 asks for one more register, so we do not reserve x21.
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x19",
          "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");
#elif defined(__mips__) && !defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a3 and t0-t7 which we say we don't clobber. May contain args.
        "addiu $sp, $sp, -64\n\t"
        "sw $a0, 0($sp)\n\t"
        "sw $a1, 4($sp)\n\t"
        "sw $a2, 8($sp)\n\t"
        "sw $a3, 12($sp)\n\t"
        "sw $t0, 16($sp)\n\t"
        "sw $t1, 20($sp)\n\t"
        "sw $t2, 24($sp)\n\t"
        "sw $t3, 28($sp)\n\t"
        "sw $t4, 32($sp)\n\t"
        "sw $t5, 36($sp)\n\t"
        "sw $t6, 40($sp)\n\t"
        "sw $t7, 44($sp)\n\t"
        // Spill gp register since it is caller save.
        "sw $gp, 52($sp)\n\t"

        "addiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sw %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "addiu $sp, $sp, -24\n\t"
        "sw %[arg0], 0($sp)\n\t"
        "sw %[arg1], 4($sp)\n\t"
        "sw %[arg2], 8($sp)\n\t"
        "sw %[code], 12($sp)\n\t"
        "sw %[self], 16($sp)\n\t"
        "sw %[hidden], 20($sp)\n\t"

        // Load call params into the right registers.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $t9, 12($sp)\n\t"
        "lw $s1, 16($sp)\n\t"
        "lw $t7, 20($sp)\n\t"
        "addiu $sp, $sp, 24\n\t"

        "jalr $t9\n\t"            // Call the stub.
        "nop\n\t"
        "addiu $sp, $sp, 16\n\t"  // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $a3, 12($sp)\n\t"
        "lw $t0, 16($sp)\n\t"
        "lw $t1, 20($sp)\n\t"
        "lw $t2, 24($sp)\n\t"
        "lw $t3, 28($sp)\n\t"
        "lw $t4, 32($sp)\n\t"
        "lw $t5, 36($sp)\n\t"
        "lw $t6, 40($sp)\n\t"
        "lw $t7, 44($sp)\n\t"
        // Restore gp.
        "lw $gp, 52($sp)\n\t"
        "addiu $sp, $sp, 64\n\t"  // Free stack space, now sp as on entry.

        "move %[result], $v0\n\t"  // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "at", "v0", "v1", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "t8", "t9", "k0", "k1",
          "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__mips__) && defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a7 which we say we don't clobber. May contain args.
        "daddiu $sp, $sp, -64\n\t"
        "sd $a0, 0($sp)\n\t"
        "sd $a1, 8($sp)\n\t"
        "sd $a2, 16($sp)\n\t"
        "sd $a3, 24($sp)\n\t"
        "sd $a4, 32($sp)\n\t"
        "sd $a5, 40($sp)\n\t"
        "sd $a6, 48($sp)\n\t"
        "sd $a7, 56($sp)\n\t"

        "daddiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sd %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "daddiu $sp, $sp, -48\n\t"
        "sd %[arg0], 0($sp)\n\t"
        "sd %[arg1], 8($sp)\n\t"
        "sd %[arg2], 16($sp)\n\t"
        "sd %[code], 24($sp)\n\t"
        "sd %[self], 32($sp)\n\t"
        "sd %[hidden], 40($sp)\n\t"

        // Load call params into the right registers.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $t9, 24($sp)\n\t"
        "ld $s1, 32($sp)\n\t"
        "ld $t0, 40($sp)\n\t"
        "daddiu $sp, $sp, 48\n\t"

        "jalr $t9\n\t"             // Call the stub.
        "nop\n\t"
        "daddiu $sp, $sp, 16\n\t"  // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $a3, 24($sp)\n\t"
        "ld $a4, 32($sp)\n\t"
        "ld $a5, 40($sp)\n\t"
        "ld $a6, 48($sp)\n\t"
        "ld $a7, 56($sp)\n\t"
        "daddiu $sp, $sp, 64\n\t"

        "move %[result], $v0\n\t"  // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        // Instead aliases t0-t3, register names $12-$15 has been used in the clobber list because
        // t0-t3 are ambiguous.
        : "at", "v0", "v1", "$12", "$13", "$14", "$15", "s0", "s1", "s2", "s3", "s4", "s5", "s6",
          "s7", "t8", "t9", "k0", "k1", "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__)
#define PUSH(reg) "pushq " # reg "\n\t .cfi_adjust_cfa_offset 8\n\t"
#define POP(reg) "popq " # reg "\n\t .cfi_adjust_cfa_offset -8\n\t"
    // Note: Uses the native convention. We do a callee-save regimen by manually spilling and
    // restoring almost all registers.
    // TODO: Set the thread?
    __asm__ __volatile__(
        // Spill almost everything (except rax, rsp). 14 registers.
        PUSH(%%rbx)
        PUSH(%%rcx)
        PUSH(%%rdx)
        PUSH(%%rsi)
        PUSH(%%rdi)
        PUSH(%%rbp)
        PUSH(%%r8)
        PUSH(%%r9)
        PUSH(%%r10)
        PUSH(%%r11)
        PUSH(%%r12)
        PUSH(%%r13)
        PUSH(%%r14)
        PUSH(%%r15)

        PUSH(%[referrer])  // Push referrer & 16B alignment padding
        PUSH(%[referrer])

        // Now juggle the input registers.
        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[hidden])
        PUSH(%[code])
        POP(%%r8)
        POP(%%rax)
        POP(%%rdx)
        POP(%%rsi)
        POP(%%rdi)

        "call *%%r8\n\t"       // Call the stub
        "addq $16, %%rsp\n\t"  // Pop null and padding
        ".cfi_adjust_cfa_offset -16\n\t"

        POP(%%r15)
        POP(%%r14)
        POP(%%r13)
        POP(%%r12)
        POP(%%r11)
        POP(%%r10)
        POP(%%r9)
        POP(%%r8)
        POP(%%rbp)
        POP(%%rdi)
        POP(%%rsi)
        POP(%%rdx)
        POP(%%rcx)
        POP(%%rbx)

        : "=a" (result)
        // Use the result from rax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into some other
        // register. We can't use "b" (rbx), as ASAN uses this for the frame pointer.
        : "memory");  // We spill and restore (almost) all registers, so only mention memory here.
#undef PUSH
#undef POP
#else
    UNUSED(arg0, arg1, arg2, code, referrer, hidden);
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Read the quick entrypoint pointer for `entrypoint` out of the thread's
  // entrypoint table.
  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
    int32_t offset;
    offset = GetThreadOffset<kRuntimePointerSize>(entrypoint).Int32Value();
    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
  }

 protected:
  // Result of the aarch64 callee-saved FPR (d8-d15) corruption check; nonzero
  // means a stub clobbered a register it should have preserved.
  size_t fp_result;
};
546
547
Andreas Gampe525cde22014-04-22 15:44:50 -0700548TEST_F(StubTest, Memcpy) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200549#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__)) || defined(__mips__)
Andreas Gampe525cde22014-04-22 15:44:50 -0700550 Thread* self = Thread::Current();
551
552 uint32_t orig[20];
553 uint32_t trg[20];
554 for (size_t i = 0; i < 20; ++i) {
555 orig[i] = i;
556 trg[i] = 0;
557 }
558
559 Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
Andreas Gampe29b38412014-08-13 00:15:43 -0700560 10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700561
562 EXPECT_EQ(orig[0], trg[0]);
563
564 for (size_t i = 1; i < 4; ++i) {
565 EXPECT_NE(orig[i], trg[i]);
566 }
567
568 for (size_t i = 4; i < 14; ++i) {
569 EXPECT_EQ(orig[i], trg[i]);
570 }
571
572 for (size_t i = 14; i < 20; ++i) {
573 EXPECT_NE(orig[i], trg[i]);
574 }
575
576 // TODO: Test overlapping?
577
578#else
579 LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
580 // Force-print to std::cout so it's also outside the logcat.
581 std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
582#endif
583}
584
Andreas Gampe525cde22014-04-22 15:44:50 -0700585TEST_F(StubTest, LockObject) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200586#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
587 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700588 static constexpr size_t kThinLockLoops = 100;
589
Andreas Gampe525cde22014-04-22 15:44:50 -0700590 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700591
592 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
593
Andreas Gampe525cde22014-04-22 15:44:50 -0700594 // Create an object
595 ScopedObjectAccess soa(self);
596 // garbage is created during ClassLinker::Init
597
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700598 StackHandleScope<2> hs(soa.Self());
599 Handle<mirror::String> obj(
600 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700601 LockWord lock = obj->GetLockWord(false);
602 LockWord::LockState old_state = lock.GetState();
603 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
604
Andreas Gampe29b38412014-08-13 00:15:43 -0700605 Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700606
607 LockWord lock_after = obj->GetLockWord(false);
608 LockWord::LockState new_state = lock_after.GetState();
609 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700610 EXPECT_EQ(lock_after.ThinLockCount(), 0U); // Thin lock starts count at zero
611
612 for (size_t i = 1; i < kThinLockLoops; ++i) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700613 Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700614
615 // Check we're at lock count i
616
617 LockWord l_inc = obj->GetLockWord(false);
618 LockWord::LockState l_inc_state = l_inc.GetState();
619 EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
620 EXPECT_EQ(l_inc.ThinLockCount(), i);
621 }
622
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700623 // Force a fat lock by running identity hashcode to fill up lock word.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700624 Handle<mirror::String> obj2(hs.NewHandle(
625 mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700626
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700627 obj2->IdentityHashCode();
628
Andreas Gampe29b38412014-08-13 00:15:43 -0700629 Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700630
631 LockWord lock_after2 = obj2->GetLockWord(false);
632 LockWord::LockState new_state2 = lock_after2.GetState();
633 EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
634 EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));
635
636 // Test done.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700637#else
638 LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
639 // Force-print to std::cout so it's also outside the logcat.
640 std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
641#endif
642}
643
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700644
// Minimal deterministic pseudo-random generator used to drive the lock/unlock
// stress test. It is intentionally simple and reproducible, not statistically
// sound: each step multiplies by 48271 (with 32-bit wraparound), reduces
// modulo 2^31 - 1, and adds 13.
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  // Advance the generator and return the new state.
  uint32_t next() {
    const uint32_t product = val_ * 48271u;  // Wraps mod 2^32, as the original did.
    val_ = product % 2147483647u + 13u;
    return val_;
  }

  // Current state; public so the stress test (or a debugger) can inspect it.
  uint32_t val_;
};
656
657
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700658// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
659static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200660#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
661 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700662 static constexpr size_t kThinLockLoops = 100;
663
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700664 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700665
666 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
667 const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700668 // Create an object
669 ScopedObjectAccess soa(self);
670 // garbage is created during ClassLinker::Init
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700671 static constexpr size_t kNumberOfLocks = 10; // Number of objects = lock
672 StackHandleScope<kNumberOfLocks + 1> hs(self);
673 Handle<mirror::String> obj(
674 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700675 LockWord lock = obj->GetLockWord(false);
676 LockWord::LockState old_state = lock.GetState();
677 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
678
Andreas Gampe29b38412014-08-13 00:15:43 -0700679 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700680 // This should be an illegal monitor state.
681 EXPECT_TRUE(self->IsExceptionPending());
682 self->ClearException();
683
684 LockWord lock_after = obj->GetLockWord(false);
685 LockWord::LockState new_state = lock_after.GetState();
686 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);
Andreas Gampe525cde22014-04-22 15:44:50 -0700687
Andreas Gampe29b38412014-08-13 00:15:43 -0700688 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700689
690 LockWord lock_after2 = obj->GetLockWord(false);
691 LockWord::LockState new_state2 = lock_after2.GetState();
692 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);
693
Andreas Gampe29b38412014-08-13 00:15:43 -0700694 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700695
696 LockWord lock_after3 = obj->GetLockWord(false);
697 LockWord::LockState new_state3 = lock_after3.GetState();
698 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);
699
700 // Stress test:
701 // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
702 // each step.
703
704 RandGen r(0x1234);
705
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700706 constexpr size_t kIterations = 10000; // Number of iterations
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700707 constexpr size_t kMoveToFat = 1000; // Chance of 1:kMoveFat to make a lock fat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700708
709 size_t counts[kNumberOfLocks];
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700710 bool fat[kNumberOfLocks]; // Whether a lock should be thin or fat.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700711 Handle<mirror::String> objects[kNumberOfLocks];
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700712
713 // Initialize = allocate.
714 for (size_t i = 0; i < kNumberOfLocks; ++i) {
715 counts[i] = 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700716 fat[i] = false;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700717 objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700718 }
719
720 for (size_t i = 0; i < kIterations; ++i) {
721 // Select which lock to update.
722 size_t index = r.next() % kNumberOfLocks;
723
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700724 // Make lock fat?
725 if (!fat[index] && (r.next() % kMoveToFat == 0)) {
726 fat[index] = true;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700727 objects[index]->IdentityHashCode();
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700728
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700729 LockWord lock_iter = objects[index]->GetLockWord(false);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700730 LockWord::LockState iter_state = lock_iter.GetState();
731 if (counts[index] == 0) {
732 EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
733 } else {
734 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
735 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700736 } else {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800737 bool take_lock; // Whether to lock or unlock in this step.
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700738 if (counts[index] == 0) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800739 take_lock = true;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700740 } else if (counts[index] == kThinLockLoops) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800741 take_lock = false;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700742 } else {
743 // Randomly.
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800744 take_lock = r.next() % 2 == 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700745 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700746
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800747 if (take_lock) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700748 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
749 self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700750 counts[index]++;
751 } else {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700752 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700753 art_quick_unlock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700754 counts[index]--;
755 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700756
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700757 EXPECT_FALSE(self->IsExceptionPending());
758
759 // Check the new state.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700760 LockWord lock_iter = objects[index]->GetLockWord(true);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700761 LockWord::LockState iter_state = lock_iter.GetState();
762 if (fat[index]) {
763 // Abuse MonitorInfo.
764 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700765 MonitorInfo info(objects[index].Get());
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700766 EXPECT_EQ(counts[index], info.entry_count_) << index;
767 } else {
768 if (counts[index] > 0) {
769 EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
770 EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
771 } else {
772 EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
773 }
774 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700775 }
776 }
777
778 // Unlock the remaining count times and then check it's unlocked. Then deallocate.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700779 // Go reverse order to correctly handle Handles.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700780 for (size_t i = 0; i < kNumberOfLocks; ++i) {
781 size_t index = kNumberOfLocks - 1 - i;
782 size_t count = counts[index];
783 while (count > 0) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700784 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
785 self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700786 count--;
787 }
788
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700789 LockWord lock_after4 = objects[index]->GetLockWord(false);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700790 LockWord::LockState new_state4 = lock_after4.GetState();
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700791 EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
792 || LockWord::LockState::kFatLocked == new_state4);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700793 }
794
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700795 // Test done.
Andreas Gampe525cde22014-04-22 15:44:50 -0700796#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -0800797 UNUSED(test);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700798 LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
Andreas Gampe525cde22014-04-22 15:44:50 -0700799 // Force-print to std::cout so it's also outside the logcat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700800 std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe525cde22014-04-22 15:44:50 -0700801#endif
802}
803
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700804TEST_F(StubTest, UnlockObject) {
Andreas Gampe369810a2015-01-14 19:53:31 -0800805 // This will lead to monitor error messages in the log.
806 ScopedLogSeverity sls(LogSeverity::FATAL);
807
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700808 TestUnlockObject(this);
809}
Andreas Gampe525cde22014-04-22 15:44:50 -0700810
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
// Forward declaration of the check-instance-of quick entrypoint, available only on the
// architectures listed above. NOTE(review): presumably implemented in per-architecture
// assembly; the CheckCast test below resolves it via GetEntrypoint(kQuickCheckInstanceOf)
// rather than through this symbol directly — confirm against the entrypoint definitions.
extern "C" void art_quick_check_instance_of(void);
#endif
815
816TEST_F(StubTest, CheckCast) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200817#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
818 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700819 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700820
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800821 const uintptr_t art_quick_check_instance_of =
822 StubTest::GetEntrypoint(self, kQuickCheckInstanceOf);
Andreas Gampe29b38412014-08-13 00:15:43 -0700823
Andreas Gampe525cde22014-04-22 15:44:50 -0700824 // Find some classes.
825 ScopedObjectAccess soa(self);
826 // garbage is created during ClassLinker::Init
827
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800828 VariableSizedHandleScope hs(soa.Self());
829 Handle<mirror::Class> klass_obj(
830 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
831 Handle<mirror::Class> klass_str(
832 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/String;")));
833 Handle<mirror::Class> klass_list(
834 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/util/List;")));
835 Handle<mirror::Class> klass_cloneable(
836 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Cloneable;")));
837 Handle<mirror::Class> klass_array_list(
838 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/util/ArrayList;")));
839 Handle<mirror::Object> obj(hs.NewHandle(klass_obj->AllocObject(soa.Self())));
840 Handle<mirror::String> string(hs.NewHandle(
841 mirror::String::AllocFromModifiedUtf8(soa.Self(), "ABCD")));
842 Handle<mirror::Object> array_list(hs.NewHandle(klass_array_list->AllocObject(soa.Self())));
Andreas Gampe525cde22014-04-22 15:44:50 -0700843
844 EXPECT_FALSE(self->IsExceptionPending());
845
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800846 Invoke3(reinterpret_cast<size_t>(obj.Get()),
847 reinterpret_cast<size_t>(klass_obj.Get()),
Mathieu Chartier2ecfd272016-11-01 10:10:05 -0700848 0U,
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800849 art_quick_check_instance_of,
Mathieu Chartier2ecfd272016-11-01 10:10:05 -0700850 self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700851 EXPECT_FALSE(self->IsExceptionPending());
852
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800853 // Expected true: Test string instance of java.lang.String.
854 Invoke3(reinterpret_cast<size_t>(string.Get()),
855 reinterpret_cast<size_t>(klass_str.Get()),
Mathieu Chartier2ecfd272016-11-01 10:10:05 -0700856 0U,
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800857 art_quick_check_instance_of,
Mathieu Chartier2ecfd272016-11-01 10:10:05 -0700858 self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700859 EXPECT_FALSE(self->IsExceptionPending());
860
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800861 // Expected true: Test string instance of java.lang.Object.
862 Invoke3(reinterpret_cast<size_t>(string.Get()),
863 reinterpret_cast<size_t>(klass_obj.Get()),
Mathieu Chartier2ecfd272016-11-01 10:10:05 -0700864 0U,
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800865 art_quick_check_instance_of,
Mathieu Chartier2ecfd272016-11-01 10:10:05 -0700866 self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700867 EXPECT_FALSE(self->IsExceptionPending());
868
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800869 // Expected false: Test object instance of java.lang.String.
870 Invoke3(reinterpret_cast<size_t>(obj.Get()),
871 reinterpret_cast<size_t>(klass_str.Get()),
Mathieu Chartier2ecfd272016-11-01 10:10:05 -0700872 0U,
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800873 art_quick_check_instance_of,
Mathieu Chartier2ecfd272016-11-01 10:10:05 -0700874 self);
875 EXPECT_TRUE(self->IsExceptionPending());
876 self->ClearException();
877
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800878 Invoke3(reinterpret_cast<size_t>(array_list.Get()),
879 reinterpret_cast<size_t>(klass_list.Get()),
Mathieu Chartier2ecfd272016-11-01 10:10:05 -0700880 0U,
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800881 art_quick_check_instance_of,
882 self);
883 EXPECT_FALSE(self->IsExceptionPending());
884
885 Invoke3(reinterpret_cast<size_t>(array_list.Get()),
886 reinterpret_cast<size_t>(klass_cloneable.Get()),
887 0U,
888 art_quick_check_instance_of,
889 self);
890 EXPECT_FALSE(self->IsExceptionPending());
891
892 Invoke3(reinterpret_cast<size_t>(string.Get()),
893 reinterpret_cast<size_t>(klass_array_list.Get()),
894 0U,
895 art_quick_check_instance_of,
896 self);
897 EXPECT_TRUE(self->IsExceptionPending());
898 self->ClearException();
899
900 Invoke3(reinterpret_cast<size_t>(string.Get()),
901 reinterpret_cast<size_t>(klass_cloneable.Get()),
902 0U,
903 art_quick_check_instance_of,
Mathieu Chartier2ecfd272016-11-01 10:10:05 -0700904 self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700905 EXPECT_TRUE(self->IsExceptionPending());
906 self->ClearException();
907
908#else
909 LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
910 // Force-print to std::cout so it's also outside the logcat.
911 std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
912#endif
913}
914
// Exercises the quick object-allocation entrypoints (WithChecks / Resolved / Initialized)
// for java.lang.Object, then drives the heap to exhaustion and checks the stub reports OOM.
TEST_F(StubTest, AllocObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Two handles: |c| here, and |ca| allocated later inside the OOM sub-test.
  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // Allocation through the "with checks" entrypoint must succeed and yield an
    // object of exactly the requested class.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectWithChecks),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // Same through the "resolved" entrypoint.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // Same through the "initialized" entrypoint.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K
    size_t length = 128 * KB / 4;
    while (length > 10) {
      // Keep allocating object arrays, shrinking the request whenever an allocation
      // fails, until only tiny requests remain — this packs the heap tightly.
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h == nullptr) {
        self->ClearException();

        // Try a smaller length
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    // With the heap exhausted, the stub must throw (OOME pending) and return null.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1031
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001032TEST_F(StubTest, AllocObjectArray) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001033#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1034 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001035 // TODO: Check the "Unresolved" allocation stubs
1036
Andreas Gampe369810a2015-01-14 19:53:31 -08001037 // This will lead to OOM error messages in the log.
1038 ScopedLogSeverity sls(LogSeverity::FATAL);
1039
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001040 Thread* self = Thread::Current();
1041 // Create an object
1042 ScopedObjectAccess soa(self);
1043 // garbage is created during ClassLinker::Init
1044
Nicolas Geoffray8d91ac32017-01-18 18:07:15 +00001045 StackHandleScope<1> hs(self);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001046 Handle<mirror::Class> c(
1047 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001048
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001049 // Play with it...
1050
1051 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001052
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001053 {
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001054 // We can use null in the second argument as we do not need a method here (not used in
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001055 // resolved/initialized cases)
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001056 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 10U,
1057 reinterpret_cast<size_t>(nullptr),
Nicolas Geoffray26aee502017-02-03 13:27:33 +00001058 StubTest::GetEntrypoint(self, kQuickAllocArrayResolved32),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001059 self);
David Sehr709b0702016-10-13 09:12:37 -07001060 EXPECT_FALSE(self->IsExceptionPending()) << mirror::Object::PrettyTypeOf(self->GetException());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001061 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1062 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
1063 EXPECT_TRUE(obj->IsArrayInstance());
1064 EXPECT_TRUE(obj->IsObjectArray());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001065 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001066 VerifyObject(obj);
1067 mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
1068 EXPECT_EQ(array->GetLength(), 10);
1069 }
1070
1071 // Failure tests.
1072
1073 // Out-of-memory.
1074 {
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001075 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001076 GB, // that should fail...
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001077 reinterpret_cast<size_t>(nullptr),
Nicolas Geoffray26aee502017-02-03 13:27:33 +00001078 StubTest::GetEntrypoint(self, kQuickAllocArrayResolved32),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001079 self);
1080
1081 EXPECT_TRUE(self->IsExceptionPending());
1082 self->ClearException();
1083 EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
1084 }
1085
1086 // Tests done.
1087#else
1088 LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
1089 // Force-print to std::cout so it's also outside the logcat.
1090 std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
1091#endif
1092}
1093
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001094
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001095TEST_F(StubTest, StringCompareTo) {
jessicahandojo3aaa37b2016-07-29 14:46:37 -07001096 TEST_DISABLED_FOR_STRING_COMPRESSION();
Scott Wakelingc25cbf12016-04-18 09:00:11 +01001097 // There is no StringCompareTo runtime entrypoint for __arm__ or __aarch64__.
1098#if defined(__i386__) || defined(__mips__) || \
1099 (defined(__x86_64__) && !defined(__APPLE__))
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001100 // TODO: Check the "Unresolved" allocation stubs
1101
1102 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -07001103
1104 const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);
1105
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001106 ScopedObjectAccess soa(self);
1107 // garbage is created during ClassLinker::Init
1108
1109 // Create some strings
1110 // Use array so we can index into it and use a matrix for expected results
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001111 // Setup: The first half is standard. The second half uses a non-zero offset.
1112 // TODO: Shared backing arrays.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001113 const char* c[] = { "", "", "a", "aa", "ab",
Serban Constantinescu86797a72014-06-19 16:17:56 +01001114 "aacaacaacaacaacaac", // This one's under the default limit to go to __memcmp16.
1115 "aacaacaacaacaacaacaacaacaacaacaacaac", // This one's over.
1116 "aacaacaacaacaacaacaacaacaacaacaacaaca" }; // As is this one. We need a separate one to
1117 // defeat object-equal optimizations.
Jeff Hao848f70a2014-01-15 13:49:50 -08001118 static constexpr size_t kStringCount = arraysize(c);
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001119
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001120 StackHandleScope<kStringCount> hs(self);
1121 Handle<mirror::String> s[kStringCount];
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001122
Jeff Hao848f70a2014-01-15 13:49:50 -08001123 for (size_t i = 0; i < kStringCount; ++i) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001124 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001125 }
1126
1127 // TODO: wide characters
1128
1129 // Matrix of expectations. First component is first parameter. Note we only check against the
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001130 // sign, not the value. As we are testing random offsets, we need to compute this and need to
1131 // rely on String::CompareTo being correct.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001132 int32_t expected[kStringCount][kStringCount];
1133 for (size_t x = 0; x < kStringCount; ++x) {
1134 for (size_t y = 0; y < kStringCount; ++y) {
1135 expected[x][y] = s[x]->CompareTo(s[y].Get());
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001136 }
1137 }
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001138
1139 // Play with it...
1140
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001141 for (size_t x = 0; x < kStringCount; ++x) {
1142 for (size_t y = 0; y < kStringCount; ++y) {
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001143 // Test string_compareto x y
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001144 size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
1145 reinterpret_cast<size_t>(s[y].Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001146 art_quick_string_compareto, self);
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001147
1148 EXPECT_FALSE(self->IsExceptionPending());
1149
1150 // The result is a 32b signed integer
1151 union {
1152 size_t r;
1153 int32_t i;
1154 } conv;
1155 conv.r = result;
1156 int32_t e = expected[x][y];
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001157 EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1158 conv.r;
1159 EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1160 conv.r;
1161 EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1162 conv.r;
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001163 }
1164 }
1165
Andreas Gampe7177d7c2014-05-02 12:10:02 -07001166 // TODO: Deallocate things.
1167
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001168 // Tests done.
1169#else
1170 LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
1171 // Force-print to std::cout so it's also outside the logcat.
1172 std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
1173 std::endl;
1174#endif
1175}
1176
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001177
Mathieu Chartierc7853442015-03-27 14:35:38 -07001178static void GetSetBooleanStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001179 ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001180 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001181#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1182 (defined(__x86_64__) && !defined(__APPLE__))
Fred Shih37f05ef2014-07-16 18:38:08 -07001183 constexpr size_t num_values = 5;
1184 uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };
1185
1186 for (size_t i = 0; i < num_values; ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001187 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001188 static_cast<size_t>(values[i]),
1189 0U,
1190 StubTest::GetEntrypoint(self, kQuickSet8Static),
1191 self,
1192 referrer);
1193
Mathieu Chartierc7853442015-03-27 14:35:38 -07001194 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001195 0U, 0U,
1196 StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
1197 self,
1198 referrer);
1199 // Boolean currently stores bools as uint8_t, be more zealous about asserting correct writes/gets.
1200 EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
1201 }
1202#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001203 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001204 LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
1205 // Force-print to std::cout so it's also outside the logcat.
1206 std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1207#endif
1208}
Mathieu Chartiere401d142015-04-22 13:56:20 -07001209static void GetSetByteStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Mathieu Chartierc7853442015-03-27 14:35:38 -07001210 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001211 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001212#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1213 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001214 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001215
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001216 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001217 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001218 static_cast<size_t>(values[i]),
1219 0U,
1220 StubTest::GetEntrypoint(self, kQuickSet8Static),
1221 self,
1222 referrer);
1223
Mathieu Chartierc7853442015-03-27 14:35:38 -07001224 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001225 0U, 0U,
1226 StubTest::GetEntrypoint(self, kQuickGetByteStatic),
1227 self,
1228 referrer);
1229 EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
1230 }
1231#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001232 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001233 LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
1234 // Force-print to std::cout so it's also outside the logcat.
1235 std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1236#endif
1237}
1238
1239
Mathieu Chartierc7853442015-03-27 14:35:38 -07001240static void GetSetBooleanInstance(Handle<mirror::Object>* obj, ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001241 ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001242 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001243#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1244 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001245 uint8_t values[] = { 0, true, 2, 128, 0xFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001246
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001247 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001248 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001249 reinterpret_cast<size_t>(obj->Get()),
1250 static_cast<size_t>(values[i]),
1251 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1252 self,
1253 referrer);
1254
Mathieu Chartierc7853442015-03-27 14:35:38 -07001255 uint8_t res = f->GetBoolean(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001256 EXPECT_EQ(values[i], res) << "Iteration " << i;
1257
Mathieu Chartierc7853442015-03-27 14:35:38 -07001258 f->SetBoolean<false>(obj->Get(), res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001259
Mathieu Chartierc7853442015-03-27 14:35:38 -07001260 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001261 reinterpret_cast<size_t>(obj->Get()),
1262 0U,
1263 StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
1264 self,
1265 referrer);
1266 EXPECT_EQ(res, static_cast<uint8_t>(res2));
1267 }
1268#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001269 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001270 LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
1271 // Force-print to std::cout so it's also outside the logcat.
1272 std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1273#endif
1274}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001275static void GetSetByteInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001276 Thread* self, ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001277 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001278#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1279 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001280 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001281
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001282 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001283 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001284 reinterpret_cast<size_t>(obj->Get()),
1285 static_cast<size_t>(values[i]),
1286 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1287 self,
1288 referrer);
1289
Mathieu Chartierc7853442015-03-27 14:35:38 -07001290 int8_t res = f->GetByte(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001291 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001292 f->SetByte<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001293
Mathieu Chartierc7853442015-03-27 14:35:38 -07001294 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001295 reinterpret_cast<size_t>(obj->Get()),
1296 0U,
1297 StubTest::GetEntrypoint(self, kQuickGetByteInstance),
1298 self,
1299 referrer);
1300 EXPECT_EQ(res, static_cast<int8_t>(res2));
1301 }
1302#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001303 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001304 LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
1305 // Force-print to std::cout so it's also outside the logcat.
1306 std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1307#endif
1308}
1309
Mathieu Chartiere401d142015-04-22 13:56:20 -07001310static void GetSetCharStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001311 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001312 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001313#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1314 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001315 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001316
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001317 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001318 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001319 static_cast<size_t>(values[i]),
1320 0U,
1321 StubTest::GetEntrypoint(self, kQuickSet16Static),
1322 self,
1323 referrer);
1324
Mathieu Chartierc7853442015-03-27 14:35:38 -07001325 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001326 0U, 0U,
1327 StubTest::GetEntrypoint(self, kQuickGetCharStatic),
1328 self,
1329 referrer);
1330
1331 EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
1332 }
1333#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001334 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001335 LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
1336 // Force-print to std::cout so it's also outside the logcat.
1337 std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1338#endif
1339}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001340static void GetSetShortStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001341 ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001342 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001343#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1344 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001345 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001346
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001347 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001348 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001349 static_cast<size_t>(values[i]),
1350 0U,
1351 StubTest::GetEntrypoint(self, kQuickSet16Static),
1352 self,
1353 referrer);
1354
Mathieu Chartierc7853442015-03-27 14:35:38 -07001355 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001356 0U, 0U,
1357 StubTest::GetEntrypoint(self, kQuickGetShortStatic),
1358 self,
1359 referrer);
1360
1361 EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
1362 }
1363#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001364 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001365 LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
1366 // Force-print to std::cout so it's also outside the logcat.
1367 std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1368#endif
1369}
1370
Mathieu Chartierc7853442015-03-27 14:35:38 -07001371static void GetSetCharInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001372 Thread* self, ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001373 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001374#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1375 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001376 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001377
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001378 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001379 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001380 reinterpret_cast<size_t>(obj->Get()),
1381 static_cast<size_t>(values[i]),
1382 StubTest::GetEntrypoint(self, kQuickSet16Instance),
1383 self,
1384 referrer);
1385
Mathieu Chartierc7853442015-03-27 14:35:38 -07001386 uint16_t res = f->GetChar(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001387 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001388 f->SetChar<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001389
Mathieu Chartierc7853442015-03-27 14:35:38 -07001390 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001391 reinterpret_cast<size_t>(obj->Get()),
1392 0U,
1393 StubTest::GetEntrypoint(self, kQuickGetCharInstance),
1394 self,
1395 referrer);
1396 EXPECT_EQ(res, static_cast<uint16_t>(res2));
1397 }
1398#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001399 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001400 LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
1401 // Force-print to std::cout so it's also outside the logcat.
1402 std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1403#endif
1404}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001405static void GetSetShortInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001406 Thread* self, ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001407 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001408#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1409 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001410 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001411
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001412 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001413 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001414 reinterpret_cast<size_t>(obj->Get()),
1415 static_cast<size_t>(values[i]),
1416 StubTest::GetEntrypoint(self, kQuickSet16Instance),
1417 self,
1418 referrer);
1419
Mathieu Chartierc7853442015-03-27 14:35:38 -07001420 int16_t res = f->GetShort(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001421 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001422 f->SetShort<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001423
Mathieu Chartierc7853442015-03-27 14:35:38 -07001424 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001425 reinterpret_cast<size_t>(obj->Get()),
1426 0U,
1427 StubTest::GetEntrypoint(self, kQuickGetShortInstance),
1428 self,
1429 referrer);
1430 EXPECT_EQ(res, static_cast<int16_t>(res2));
1431 }
1432#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001433 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001434 LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
1435 // Force-print to std::cout so it's also outside the logcat.
1436 std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1437#endif
1438}
1439
Mathieu Chartiere401d142015-04-22 13:56:20 -07001440static void GetSet32Static(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001441 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001442 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001443#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1444 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001445 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001446
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001447 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001448 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001449 static_cast<size_t>(values[i]),
1450 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001451 StubTest::GetEntrypoint(self, kQuickSet32Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001452 self,
1453 referrer);
1454
Mathieu Chartierc7853442015-03-27 14:35:38 -07001455 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001456 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001457 StubTest::GetEntrypoint(self, kQuickGet32Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001458 self,
1459 referrer);
1460
Goran Jakovljevic04568812015-04-23 15:27:23 +02001461#if defined(__mips__) && defined(__LP64__)
1462 EXPECT_EQ(static_cast<uint32_t>(res), values[i]) << "Iteration " << i;
1463#else
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001464 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Goran Jakovljevic04568812015-04-23 15:27:23 +02001465#endif
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001466 }
1467#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001468 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001469 LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
1470 // Force-print to std::cout so it's also outside the logcat.
1471 std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
1472#endif
1473}
1474
1475
Mathieu Chartierc7853442015-03-27 14:35:38 -07001476static void GetSet32Instance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001477 Thread* self, ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001478 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001479#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1480 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001481 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001482
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001483 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001484 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001485 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001486 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001487 StubTest::GetEntrypoint(self, kQuickSet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001488 self,
1489 referrer);
1490
Mathieu Chartierc7853442015-03-27 14:35:38 -07001491 int32_t res = f->GetInt(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001492 EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;
1493
1494 res++;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001495 f->SetInt<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001496
Mathieu Chartierc7853442015-03-27 14:35:38 -07001497 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001498 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001499 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001500 StubTest::GetEntrypoint(self, kQuickGet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001501 self,
1502 referrer);
1503 EXPECT_EQ(res, static_cast<int32_t>(res2));
1504 }
1505#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001506 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001507 LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
1508 // Force-print to std::cout so it's also outside the logcat.
1509 std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1510#endif
1511}
1512
1513
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001514#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1515 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001516
1517static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001518 ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001519 REQUIRES_SHARED(Locks::mutator_lock_) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001520 test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1521 reinterpret_cast<size_t>(val),
1522 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001523 StubTest::GetEntrypoint(self, kQuickSetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001524 self,
1525 referrer);
1526
1527 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1528 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001529 StubTest::GetEntrypoint(self, kQuickGetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001530 self,
1531 referrer);
1532
1533 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1534}
1535#endif
1536
Mathieu Chartiere401d142015-04-22 13:56:20 -07001537static void GetSetObjStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001538 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001539 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001540#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1541 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierc7853442015-03-27 14:35:38 -07001542 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001543
1544 // Allocate a string object for simplicity.
1545 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartierc7853442015-03-27 14:35:38 -07001546 set_and_check_static(f->GetDexFieldIndex(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001547
Mathieu Chartierc7853442015-03-27 14:35:38 -07001548 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001549#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001550 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001551 LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
1552 // Force-print to std::cout so it's also outside the logcat.
1553 std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
1554#endif
1555}
1556
1557
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001558#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1559 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierc7853442015-03-27 14:35:38 -07001560static void set_and_check_instance(ArtField* f, mirror::Object* trg,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001561 mirror::Object* val, Thread* self, ArtMethod* referrer,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001562 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001563 REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001564 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001565 reinterpret_cast<size_t>(trg),
1566 reinterpret_cast<size_t>(val),
Andreas Gampe29b38412014-08-13 00:15:43 -07001567 StubTest::GetEntrypoint(self, kQuickSetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001568 self,
1569 referrer);
1570
Mathieu Chartierc7853442015-03-27 14:35:38 -07001571 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001572 reinterpret_cast<size_t>(trg),
1573 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001574 StubTest::GetEntrypoint(self, kQuickGetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001575 self,
1576 referrer);
1577
1578 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1579
Mathieu Chartier3398c782016-09-30 10:27:43 -07001580 EXPECT_OBJ_PTR_EQ(val, f->GetObj(trg));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001581}
1582#endif
1583
Mathieu Chartierc7853442015-03-27 14:35:38 -07001584static void GetSetObjInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001585 Thread* self, ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001586 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001587#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1588 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001589 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001590
1591 // Allocate a string object for simplicity.
1592 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001593 set_and_check_instance(f, obj->Get(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001594
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001595 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001596#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001597 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001598 LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
1599 // Force-print to std::cout so it's also outside the logcat.
1600 std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
1601#endif
1602}
1603
1604
Calin Juravle872ab3f2015-10-02 07:27:51 +01001605// TODO: Complete these tests for 32b architectures
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001606
Mathieu Chartiere401d142015-04-22 13:56:20 -07001607static void GetSet64Static(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001608 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001609 REQUIRES_SHARED(Locks::mutator_lock_) {
Calin Juravle6e399ac2015-10-02 23:56:06 +01001610#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) \
1611 || defined(__aarch64__)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001612 uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001613
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001614 for (size_t i = 0; i < arraysize(values); ++i) {
Calin Juravle6e399ac2015-10-02 23:56:06 +01001615 // 64 bit FieldSet stores the set value in the second register.
1616 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Calin Juravle24cc1b32015-10-06 11:46:58 +01001617 values[i],
Nicolas Geoffray5b3c6c02017-01-19 14:22:26 +00001618 0U,
Calin Juravle24cc1b32015-10-06 11:46:58 +01001619 StubTest::GetEntrypoint(self, kQuickSet64Static),
1620 self,
1621 referrer);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001622
Mathieu Chartierc7853442015-03-27 14:35:38 -07001623 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001624 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001625 StubTest::GetEntrypoint(self, kQuickGet64Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001626 self,
1627 referrer);
1628
1629 EXPECT_EQ(res, values[i]) << "Iteration " << i;
1630 }
1631#else
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001632 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001633 LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
1634 // Force-print to std::cout so it's also outside the logcat.
1635 std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
1636#endif
1637}
1638
1639
Mathieu Chartierc7853442015-03-27 14:35:38 -07001640static void GetSet64Instance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001641 Thread* self, ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001642 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001643#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) || \
1644 defined(__aarch64__)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001645 uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001646
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001647 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001648 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001649 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001650 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001651 StubTest::GetEntrypoint(self, kQuickSet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001652 self,
1653 referrer);
1654
Mathieu Chartierc7853442015-03-27 14:35:38 -07001655 int64_t res = f->GetLong(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001656 EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;
1657
1658 res++;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001659 f->SetLong<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001660
Mathieu Chartierc7853442015-03-27 14:35:38 -07001661 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001662 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001663 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001664 StubTest::GetEntrypoint(self, kQuickGet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001665 self,
1666 referrer);
1667 EXPECT_EQ(res, static_cast<int64_t>(res2));
1668 }
1669#else
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001670 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001671 LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
1672 // Force-print to std::cout so it's also outside the logcat.
1673 std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1674#endif
1675}
1676
// Drives the get/set stub tests for every field of test class "AllFields"
// whose primitive type matches 'test_type': first all static fields, then all
// instance fields. Array-typed reference fields are skipped. Requires the
// runtime to be started (uses JNI to create an AllFields instance).
static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
  // garbage is created during ClassLinker::Init

  // Create an AllFields instance through JNI.
  JNIEnv* env = Thread::Current()->GetJniEnv();
  jclass jc = env->FindClass("AllFields");
  CHECK(jc != nullptr);
  jobject o = env->AllocObject(jc);
  CHECK(o != nullptr);

  // Switch to runnable and root the object and its class in handles.
  ScopedObjectAccess soa(self);
  StackHandleScope<3> hs(self);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object>(o)));
  Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
  // Need a method as a referrer
  ArtMethod* m = c->GetDirectMethod(0, kRuntimePointerSize);

  // Play with it...

  // Static fields.
  for (ArtField& f : c->GetSFields()) {
    Primitive::Type type = f.GetTypeAsPrimitiveType();
    if (test_type != type) {
      continue;
    }
    // Dispatch to the stub tester matching the field's primitive type.
    switch (type) {
      case Primitive::Type::kPrimBoolean:
        GetSetBooleanStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimByte:
        GetSetByteStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimChar:
        GetSetCharStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimShort:
        GetSetShortStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimInt:
        GetSet32Static(&f, self, m, test);
        break;
      case Primitive::Type::kPrimLong:
        GetSet64Static(&f, self, m, test);
        break;
      case Primitive::Type::kPrimNot:
        // Don't try array.
        if (f.GetTypeDescriptor()[0] != '[') {
          GetSetObjStatic(&f, self, m, test);
        }
        break;
      default:
        break;  // Skip.
    }
  }

  // Instance fields.
  for (ArtField& f : c->GetIFields()) {
    Primitive::Type type = f.GetTypeAsPrimitiveType();
    if (test_type != type) {
      continue;
    }
    // Dispatch to the stub tester matching the field's primitive type.
    switch (type) {
      case Primitive::Type::kPrimBoolean:
        GetSetBooleanInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimByte:
        GetSetByteInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimChar:
        GetSetCharInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimShort:
        GetSetShortInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimInt:
        GetSet32Instance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimLong:
        GetSet64Instance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimNot:
        // Don't try array.
        if (f.GetTypeDescriptor()[0] != '[') {
          GetSetObjInstance(&obj, &f, self, m, test);
        }
        break;
      default:
        break;  // Skip.
    }
  }

  // TODO: Deallocate things.
}
1769
Fred Shih37f05ef2014-07-16 18:38:08 -07001770TEST_F(StubTest, Fields8) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001771 Thread* self = Thread::Current();
1772
1773 self->TransitionFromSuspendedToRunnable();
1774 LoadDex("AllFields");
1775 bool started = runtime_->Start();
1776 CHECK(started);
1777
1778 TestFields(self, this, Primitive::Type::kPrimBoolean);
1779 TestFields(self, this, Primitive::Type::kPrimByte);
1780}
1781
1782TEST_F(StubTest, Fields16) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001783 Thread* self = Thread::Current();
1784
1785 self->TransitionFromSuspendedToRunnable();
1786 LoadDex("AllFields");
1787 bool started = runtime_->Start();
1788 CHECK(started);
1789
1790 TestFields(self, this, Primitive::Type::kPrimChar);
1791 TestFields(self, this, Primitive::Type::kPrimShort);
1792}
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001793
1794TEST_F(StubTest, Fields32) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001795 Thread* self = Thread::Current();
1796
1797 self->TransitionFromSuspendedToRunnable();
1798 LoadDex("AllFields");
1799 bool started = runtime_->Start();
1800 CHECK(started);
1801
1802 TestFields(self, this, Primitive::Type::kPrimInt);
1803}
1804
1805TEST_F(StubTest, FieldsObj) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001806 Thread* self = Thread::Current();
1807
1808 self->TransitionFromSuspendedToRunnable();
1809 LoadDex("AllFields");
1810 bool started = runtime_->Start();
1811 CHECK(started);
1812
1813 TestFields(self, this, Primitive::Type::kPrimNot);
1814}
1815
1816TEST_F(StubTest, Fields64) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001817 Thread* self = Thread::Current();
1818
1819 self->TransitionFromSuspendedToRunnable();
1820 LoadDex("AllFields");
1821 bool started = runtime_->Start();
1822 CHECK(started);
1823
1824 TestFields(self, this, Primitive::Type::kPrimLong);
1825}
1826
// Disabled, b/27991555 .
// FIXME: Hacking the entry point to point to art_quick_to_interpreter_bridge is broken.
// The bridge calls through to GetCalleeSaveMethodCaller() which looks up the pre-header
// and gets a bogus OatQuickMethodHeader* pointing into our assembly code just before
// the bridge and uses that to check for inlined frames, crashing in the process.
//
// Exercises interface dispatch stubs: (1) the IMT conflict trampoline, driven through a
// hand-built ImtConflictTable mapping List.contains -> ArrayList.contains, and (2) the
// regular invoke-interface-with-access-check trampoline. Both are verified by calling
// List.contains on an ArrayList before and after adding the probe object.
TEST_F(StubTest, DISABLED_IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod = env->GetMethodID(
      arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get representation.
  ArtMethod* contains_amethod = jni::DecodeArtMethod(contains_jmethod);

  // Patch up ArrayList.contains.
  // If the method has no compiled code, point it at the interpreter bridge so the stub
  // invocation below has somewhere to land. (This patching is what the FIXME above says
  // is broken — see the disable note.)
  if (contains_amethod->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
  }

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod = env->GetMethodID(
      list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get mirror representation.
  ArtMethod* inf_contains = jni::DecodeArtMethod(inf_contains_jmethod);

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Create instances.

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object>(jobj)));

  // Invocation tests.

  // 1. imt_conflict

  // Contains.

  // We construct the ImtConflictTable ourselves, as we cannot go into the runtime stub
  // that will create it: the runtime stub expects to be called by compiled code.
  LinearAlloc* linear_alloc = Runtime::Current()->GetLinearAlloc();
  ArtMethod* conflict_method = Runtime::Current()->CreateImtConflictMethod(linear_alloc);
  ImtConflictTable* empty_conflict_table =
      Runtime::Current()->GetClassLinker()->CreateImtConflictTable(/*count=*/0u, linear_alloc);
  // Grow the empty table by one entry (interface method -> implementation method) using
  // placement new into LinearAlloc-owned storage; the allocation is sized for exactly
  // one more entry than the empty table.
  void* data = linear_alloc->Alloc(
      self,
      ImtConflictTable::ComputeSizeWithOneMoreEntry(empty_conflict_table, kRuntimePointerSize));
  ImtConflictTable* new_table = new (data) ImtConflictTable(
      empty_conflict_table, inf_contains, contains_amethod, kRuntimePointerSize);
  conflict_method->SetImtConflictTable(new_table, kRuntimePointerSize);

  // Call through the IMT conflict trampoline; the list is still empty, so
  // contains(obj) must report false.
  size_t result =
      Invoke3WithReferrerAndHidden(reinterpret_cast<size_t>(conflict_method),
                                   reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self,
                                   contains_amethod,
                                   static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << mirror::Object::PrettyTypeOf(self->GetException());

  // Contains.

  // Same dispatch as above, but now the object was added, so the result flips to true.
  result =
      Invoke3WithReferrerAndHidden(reinterpret_cast<size_t>(conflict_method),
                                   reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self,
                                   contains_amethod,
                                   static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // 2. regular interface trampoline

  // Dispatch List.contains by dex method index through the access-checking interface
  // trampoline: contains(obj) is true (obj was added above)...
  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(obj.Get()),
                               StubTest::GetEntrypoint(self,
                                                       kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // ...while contains(array_list) — the list does not contain itself — is false.
  result = Invoke3WithReferrer(
      static_cast<size_t>(inf_contains->GetDexMethodIndex()),
      reinterpret_cast<size_t>(array_list.Get()), reinterpret_cast<size_t>(array_list.Get()),
      StubTest::GetEntrypoint(self, kQuickInvokeInterfaceTrampolineWithAccessCheck), self,
      contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1970
Andreas Gampe6aac3552014-06-09 14:55:53 -07001971TEST_F(StubTest, StringIndexOf) {
Chris Larsencf283da2016-01-19 16:45:35 -08001972#if defined(__arm__) || defined(__aarch64__) || defined(__mips__)
Andreas Gampe6aac3552014-06-09 14:55:53 -07001973 Thread* self = Thread::Current();
1974 ScopedObjectAccess soa(self);
1975 // garbage is created during ClassLinker::Init
1976
1977 // Create some strings
1978 // Use array so we can index into it and use a matrix for expected results
1979 // Setup: The first half is standard. The second half uses a non-zero offset.
1980 // TODO: Shared backing arrays.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001981 const char* c_str[] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
1982 static constexpr size_t kStringCount = arraysize(c_str);
1983 const char c_char[] = { 'a', 'b', 'c', 'd', 'e' };
1984 static constexpr size_t kCharCount = arraysize(c_char);
Andreas Gampe6aac3552014-06-09 14:55:53 -07001985
1986 StackHandleScope<kStringCount> hs(self);
1987 Handle<mirror::String> s[kStringCount];
1988
1989 for (size_t i = 0; i < kStringCount; ++i) {
1990 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
1991 }
1992
1993 // Matrix of expectations. First component is first parameter. Note we only check against the
1994 // sign, not the value. As we are testing random offsets, we need to compute this and need to
1995 // rely on String::CompareTo being correct.
1996 static constexpr size_t kMaxLen = 9;
1997 DCHECK_LE(strlen(c_str[kStringCount-1]), kMaxLen) << "Please fix the indexof test.";
1998
1999 // Last dimension: start, offset by 1.
2000 int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
2001 for (size_t x = 0; x < kStringCount; ++x) {
2002 for (size_t y = 0; y < kCharCount; ++y) {
2003 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2004 expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
2005 }
2006 }
2007 }
2008
2009 // Play with it...
2010
2011 for (size_t x = 0; x < kStringCount; ++x) {
2012 for (size_t y = 0; y < kCharCount; ++y) {
2013 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2014 int32_t start = static_cast<int32_t>(z) - 1;
2015
2016 // Test string_compareto x y
2017 size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
Andreas Gampe29b38412014-08-13 00:15:43 -07002018 StubTest::GetEntrypoint(self, kQuickIndexOf), self);
Andreas Gampe6aac3552014-06-09 14:55:53 -07002019
2020 EXPECT_FALSE(self->IsExceptionPending());
2021
2022 // The result is a 32b signed integer
2023 union {
2024 size_t r;
2025 int32_t i;
2026 } conv;
2027 conv.r = result;
2028
2029 EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
2030 c_char[y] << " @ " << start;
2031 }
2032 }
2033 }
2034
2035 // TODO: Deallocate things.
2036
2037 // Tests done.
2038#else
2039 LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
2040 // Force-print to std::cout so it's also outside the logcat.
2041 std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe51f76352014-05-21 08:28:48 -07002042#endif
2043}
2044
Roland Levillain02b75802016-07-13 11:54:35 +01002045// TODO: Exercise the ReadBarrierMarkRegX entry points.
2046
Man Cao1aee9002015-07-14 22:31:42 -07002047TEST_F(StubTest, ReadBarrier) {
2048#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
2049 defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
2050 Thread* self = Thread::Current();
2051
2052 const uintptr_t readBarrierSlow = StubTest::GetEntrypoint(self, kQuickReadBarrierSlow);
2053
2054 // Create an object
2055 ScopedObjectAccess soa(self);
2056 // garbage is created during ClassLinker::Init
2057
2058 StackHandleScope<2> hs(soa.Self());
2059 Handle<mirror::Class> c(
2060 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
2061
2062 // Build an object instance
2063 Handle<mirror::Object> obj(hs.NewHandle(c->AllocObject(soa.Self())));
2064
2065 EXPECT_FALSE(self->IsExceptionPending());
2066
2067 size_t result = Invoke3(0U, reinterpret_cast<size_t>(obj.Get()),
2068 mirror::Object::ClassOffset().SizeValue(), readBarrierSlow, self);
2069
2070 EXPECT_FALSE(self->IsExceptionPending());
2071 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
2072 mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
2073 EXPECT_EQ(klass, obj->GetClass());
2074
2075 // Tests done.
2076#else
2077 LOG(INFO) << "Skipping read_barrier_slow";
2078 // Force-print to std::cout so it's also outside the logcat.
2079 std::cout << "Skipping read_barrier_slow" << std::endl;
2080#endif
2081}
2082
Roland Levillain0d5a2812015-11-13 10:07:31 +00002083TEST_F(StubTest, ReadBarrierForRoot) {
2084#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
2085 defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
2086 Thread* self = Thread::Current();
2087
2088 const uintptr_t readBarrierForRootSlow =
2089 StubTest::GetEntrypoint(self, kQuickReadBarrierForRootSlow);
2090
2091 // Create an object
2092 ScopedObjectAccess soa(self);
2093 // garbage is created during ClassLinker::Init
2094
2095 StackHandleScope<1> hs(soa.Self());
2096
2097 Handle<mirror::String> obj(
2098 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
2099
2100 EXPECT_FALSE(self->IsExceptionPending());
2101
Vladimir Markoacb906d2018-05-30 10:23:49 +01002102 GcRoot<mirror::Class> root(GetClassRoot<mirror::String>());
Roland Levillain0d5a2812015-11-13 10:07:31 +00002103 size_t result = Invoke3(reinterpret_cast<size_t>(&root), 0U, 0U, readBarrierForRootSlow, self);
2104
2105 EXPECT_FALSE(self->IsExceptionPending());
2106 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
2107 mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
2108 EXPECT_EQ(klass, obj->GetClass());
2109
2110 // Tests done.
2111#else
2112 LOG(INFO) << "Skipping read_barrier_for_root_slow";
2113 // Force-print to std::cout so it's also outside the logcat.
2114 std::cout << "Skipping read_barrier_for_root_slow" << std::endl;
2115#endif
2116}
2117
Andreas Gampe525cde22014-04-22 15:44:50 -07002118} // namespace art