/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <cstdio>

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/callee_save_type.h"
#include "base/enums.h"
#include "class_linker-inl.h"
#include "class_root.h"
#include "common_runtime_test.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "imt_conflict_table.h"
#include "jni/jni_internal.h"
#include "linear_alloc.h"
#include "mirror/class-inl.h"
#include "mirror/string-inl.h"
#include "scoped_thread_state_change-inl.h"

namespace art {


class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods.
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      for (uint32_t i = 0; i < static_cast<uint32_t>(CalleeSaveType::kLastCalleeSaveType); ++i) {
        CalleeSaveType type = CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(), type);
        }
      }
    }
  }

  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    // Use a smaller heap.
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             ArtMethod* referrer) {
    return Invoke3WithReferrerAndHidden(arg0, arg1, arg2, code, self, referrer, 0);
  }

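  // Like Invoke3WithReferrer, but also materializes the extra "hidden" argument in the register
  // the per-architecture blocks below place it in (xmm7 on x86, r12 on arm, x17 on arm64,
  // $t7 on mips32, $t0 on mips64, rax on x86-64).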
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in the thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
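    // Written only by the arm64 path below: stays 0 if the callee-saved FPRs d8-d15 survived the
    // call, becomes 1 if one of them came back clobbered.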
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
#define PUSH(reg) "push " # reg "\n\t .cfi_adjust_cfa_offset 4\n\t"
#define POP(reg) "pop " # reg "\n\t .cfi_adjust_cfa_offset -4\n\t"
    __asm__ __volatile__(
        "movd %[hidden], %%xmm7\n\t"  // This is a memory op, so do this early. If it is off of
                                      // esp, then we won't be able to access it after spilling.

        // Spill 6 registers.
        PUSH(%%ebx)
        PUSH(%%ecx)
        PUSH(%%edx)
        PUSH(%%esi)
        PUSH(%%edi)
        PUSH(%%ebp)

        // Store the inputs on the stack, keeping the referrer on top (less work).
        PUSH(%[referrer])  // Align stack.
        PUSH(%[referrer])  // Store referrer.

        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[code])
        // Now read them back into the required registers.
        POP(%%edi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%eax)
        // Call is prepared now.

        "call *%%edi\n\t"            // Call the stub.
        "addl $8, %%esp\n\t"         // Pop referrer and padding.
        ".cfi_adjust_cfa_offset -8\n\t"

        // Restore 6 registers.
        POP(%%ebp)
        POP(%%edi)
        POP(%%esi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%ebx)

        : "=a" (result)
          // Use the result from eax.
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer] "r"(referrer), [hidden] "m"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx.
        : "memory", "xmm7");  // clobber.
#undef PUSH
#undef POP
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"      // Save state, 13*4B = 52B.
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"     // Push referrer, +8B padding so 16B aligned.
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                 // Call the stub.
        "add sp, sp, #12\n\t"        // Pop referrer and padding.
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"       // Restore state.
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"      // Save the result.
        : [result] "=r" (result)
          // Use the result from r0.
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "r0", "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #80\n\t"
        ".cfi_adjust_cfa_offset 80\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"
        // To be extra defensive, store x20, x21. We do this because some of the stubs might make a
        // transition into the runtime via the blr instruction below and *not* save x20.
        "stp x20, x21, [sp, #64]\n\t"

        "sub sp, sp, #16\n\t"        // Reserve stack space, 16B aligned.
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"  // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15.
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x19, x17, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"                 // Call the stub.
        "mov x8, x0\n\t"             // Store result.
        "add sp, sp, #16\n\t"        // Drop the quick "frame".
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"             // Use x9 as flag, in clobber list.

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"       // Restore stuff not named clobbered, may contain fpr_result.
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "ldp x20, x21, [sp, #64]\n\t"
        "add sp, sp, #80\n\t"        // Free stack space, now sp as on entry.
        ".cfi_adjust_cfa_offset -80\n\t"

        "str x9, %[fpr_result]\n\t"  // Store the FPR comparison result.
        "mov %[result], x8\n\t"      // Store the call result.

        "b 3f\n\t"                   // Goto end.

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                   // Goto finish-up.

        // End.
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from x0.
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
          // Leave one register unclobbered, which is needed for compiling with
          // -fstack-protector-strong. According to AAPCS64 registers x9-x15 are caller-saved,
          // which means we should unclobber one of the callee-saved registers that are unused.
          // Here we use x20.
          // http://b/72613441, Clang 7.0 asks for one more register, so we do not reserve x21.
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19",
          "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");
#elif defined(__mips__) && !defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a3 and t0-t7 which we say we don't clobber. May contain args.
        "addiu $sp, $sp, -64\n\t"
        "sw $a0, 0($sp)\n\t"
        "sw $a1, 4($sp)\n\t"
        "sw $a2, 8($sp)\n\t"
        "sw $a3, 12($sp)\n\t"
        "sw $t0, 16($sp)\n\t"
        "sw $t1, 20($sp)\n\t"
        "sw $t2, 24($sp)\n\t"
        "sw $t3, 28($sp)\n\t"
        "sw $t4, 32($sp)\n\t"
        "sw $t5, 36($sp)\n\t"
        "sw $t6, 40($sp)\n\t"
        "sw $t7, 44($sp)\n\t"
        // Spill gp register since it is caller save.
        "sw $gp, 52($sp)\n\t"

        "addiu $sp, $sp, -16\n\t"    // Reserve stack space, 16B aligned.
        "sw %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "addiu $sp, $sp, -24\n\t"
        "sw %[arg0], 0($sp)\n\t"
        "sw %[arg1], 4($sp)\n\t"
        "sw %[arg2], 8($sp)\n\t"
        "sw %[code], 12($sp)\n\t"
        "sw %[self], 16($sp)\n\t"
        "sw %[hidden], 20($sp)\n\t"

        // Load call params into the right registers.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $t9, 12($sp)\n\t"
        "lw $s1, 16($sp)\n\t"
        "lw $t7, 20($sp)\n\t"
        "addiu $sp, $sp, 24\n\t"

        "jalr $t9\n\t"               // Call the stub.
        "nop\n\t"
        "addiu $sp, $sp, 16\n\t"     // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $a3, 12($sp)\n\t"
        "lw $t0, 16($sp)\n\t"
        "lw $t1, 20($sp)\n\t"
        "lw $t2, 24($sp)\n\t"
        "lw $t3, 28($sp)\n\t"
        "lw $t4, 32($sp)\n\t"
        "lw $t5, 36($sp)\n\t"
        "lw $t6, 40($sp)\n\t"
        "lw $t7, 44($sp)\n\t"
        // Restore gp.
        "lw $gp, 52($sp)\n\t"
        "addiu $sp, $sp, 64\n\t"     // Free stack space, now sp as on entry.

        "move %[result], $v0\n\t"    // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "at", "v0", "v1", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "t8", "t9", "k0", "k1",
          "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__mips__) && defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a7 which we say we don't clobber. May contain args.
        "daddiu $sp, $sp, -64\n\t"
        "sd $a0, 0($sp)\n\t"
        "sd $a1, 8($sp)\n\t"
        "sd $a2, 16($sp)\n\t"
        "sd $a3, 24($sp)\n\t"
        "sd $a4, 32($sp)\n\t"
        "sd $a5, 40($sp)\n\t"
        "sd $a6, 48($sp)\n\t"
        "sd $a7, 56($sp)\n\t"

        "daddiu $sp, $sp, -16\n\t"   // Reserve stack space, 16B aligned.
        "sd %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "daddiu $sp, $sp, -48\n\t"
        "sd %[arg0], 0($sp)\n\t"
        "sd %[arg1], 8($sp)\n\t"
        "sd %[arg2], 16($sp)\n\t"
        "sd %[code], 24($sp)\n\t"
        "sd %[self], 32($sp)\n\t"
        "sd %[hidden], 40($sp)\n\t"

        // Load call params into the right registers.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $t9, 24($sp)\n\t"
        "ld $s1, 32($sp)\n\t"
        "ld $t0, 40($sp)\n\t"
        "daddiu $sp, $sp, 48\n\t"

        "jalr $t9\n\t"               // Call the stub.
        "nop\n\t"
        "daddiu $sp, $sp, 16\n\t"    // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $a3, 24($sp)\n\t"
        "ld $a4, 32($sp)\n\t"
        "ld $a5, 40($sp)\n\t"
        "ld $a6, 48($sp)\n\t"
        "ld $a7, 56($sp)\n\t"
        "daddiu $sp, $sp, 64\n\t"

        "move %[result], $v0\n\t"    // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        // Instead of the aliases t0-t3, the register names $12-$15 are used in the clobber list
        // because t0-t3 are ambiguous.
        : "at", "v0", "v1", "$12", "$13", "$14", "$15", "s0", "s1", "s2", "s3", "s4", "s5", "s6",
          "s7", "t8", "t9", "k0", "k1", "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__)
#define PUSH(reg) "pushq " # reg "\n\t .cfi_adjust_cfa_offset 8\n\t"
#define POP(reg) "popq " # reg "\n\t .cfi_adjust_cfa_offset -8\n\t"
    // Note: Uses the native convention. We do a callee-save regimen by manually spilling and
    // restoring almost all registers.
    // TODO: Set the thread?
    __asm__ __volatile__(
        // Spill almost everything (except rax, rsp). 14 registers.
        PUSH(%%rbx)
        PUSH(%%rcx)
        PUSH(%%rdx)
        PUSH(%%rsi)
        PUSH(%%rdi)
        PUSH(%%rbp)
        PUSH(%%r8)
        PUSH(%%r9)
        PUSH(%%r10)
        PUSH(%%r11)
        PUSH(%%r12)
        PUSH(%%r13)
        PUSH(%%r14)
        PUSH(%%r15)

        PUSH(%[referrer])            // Push referrer & 16B alignment padding.
        PUSH(%[referrer])

        // Now juggle the input registers.
        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[hidden])
        PUSH(%[code])
        POP(%%r8)
        POP(%%rax)
        POP(%%rdx)
        POP(%%rsi)
        POP(%%rdi)

        "call *%%r8\n\t"             // Call the stub.
        "addq $16, %%rsp\n\t"        // Pop referrer and padding.
        ".cfi_adjust_cfa_offset -16\n\t"

        POP(%%r15)
        POP(%%r14)
        POP(%%r13)
        POP(%%r12)
        POP(%%r11)
        POP(%%r10)
        POP(%%r9)
        POP(%%r8)
        POP(%%rbp)
        POP(%%rdi)
        POP(%%rsi)
        POP(%%rdx)
        POP(%%rcx)
        POP(%%rbx)

        : "=a" (result)
          // Use the result from rax.
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into some other
          // register. We can't use "b" (rbx), as ASAN uses this for the frame pointer.
        : "memory");  // We spill and restore (almost) all registers, so only mention memory here.
#undef PUSH
#undef POP
#else
    UNUSED(arg0, arg1, arg2, code, referrer, hidden);
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

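  // Reads a quick entrypoint address out of the current Thread's entrypoint table; tests pass
  // the returned address to Invoke3() and friends as the raw code pointer to call.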
  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
    int32_t offset = GetThreadOffset<kRuntimePointerSize>(entrypoint).Int32Value();
    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
  }

 protected:
  size_t fp_result;
};


TEST_F(StubTest, Memcpy) {
#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__)) || defined(__mips__)
  Thread* self = Thread::Current();

  uint32_t orig[20];
  uint32_t trg[20];
  for (size_t i = 0; i < 20; ++i) {
    orig[i] = i;
    trg[i] = 0;
  }

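  // Copy 10 words from the middle of orig into the middle of trg; the checks below verify the
  // copied region and that the words on either side of it were left untouched.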
  Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
          10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);

  EXPECT_EQ(orig[0], trg[0]);

  for (size_t i = 1; i < 4; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  for (size_t i = 4; i < 14; ++i) {
    EXPECT_EQ(orig[i], trg[i]);
  }

  for (size_t i = 14; i < 20; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  // TODO: Test overlapping?

#else
  LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, LockObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();

  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);

  // Create an object.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
  EXPECT_EQ(lock_after.ThinLockCount(), 0U);  // Thin lock starts count at zero.

  for (size_t i = 1; i < kThinLockLoops; ++i) {
    Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

    // Check we're at lock count i.
    LockWord l_inc = obj->GetLockWord(false);
    LockWord::LockState l_inc_state = l_inc.GetState();
    EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
    EXPECT_EQ(l_inc.ThinLockCount(), i);
  }

  // Force a fat lock by running identity hashcode to fill up the lock word.
  Handle<mirror::String> obj2(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  obj2->IdentityHashCode();

  Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after2 = obj2->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
  EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));

  // Test done.
#else
  LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


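// Deterministic pseudo-random generator for the stress test below: a Lehmer-style multiplicative
// congruential step (multiplier 48271, modulus 2^31 - 1) plus a small constant, so a failing
// sequence can be reproduced from its seed.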
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  uint32_t next() {
    val_ = val_ * 48271 % 2147483647 + 13;
    return val_;
  }

  uint32_t val_;
};


// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();

  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
  const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
  // Create an object.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init
  static constexpr size_t kNumberOfLocks = 10;  // Number of objects (= number of locks).
  StackHandleScope<kNumberOfLocks + 1> hs(self);
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
  // This should be an illegal monitor state.
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after2 = obj->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);

  LockWord lock_after3 = obj->GetLockWord(false);
  LockWord::LockState new_state3 = lock_after3.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);

  // Stress test:
  // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
  // each step.

  RandGen r(0x1234);

  constexpr size_t kIterations = 10000;  // Number of iterations.
  constexpr size_t kMoveToFat = 1000;    // Chance of 1:kMoveToFat to make a lock fat.

  size_t counts[kNumberOfLocks];
  bool fat[kNumberOfLocks];  // Whether a lock should be thin or fat.
  Handle<mirror::String> objects[kNumberOfLocks];

  // Initialize = allocate.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    counts[i] = 0;
    fat[i] = false;
    objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
  }

  for (size_t i = 0; i < kIterations; ++i) {
    // Select which lock to update.
    size_t index = r.next() % kNumberOfLocks;

    // Make lock fat?
    if (!fat[index] && (r.next() % kMoveToFat == 0)) {
      fat[index] = true;
      objects[index]->IdentityHashCode();

      LockWord lock_iter = objects[index]->GetLockWord(false);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (counts[index] == 0) {
        EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
      } else {
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
      }
    } else {
      bool take_lock;  // Whether to lock or unlock in this step.
      if (counts[index] == 0) {
        take_lock = true;
      } else if (counts[index] == kThinLockLoops) {
        take_lock = false;
      } else {
        // Randomly.
        take_lock = r.next() % 2 == 0;
      }

      if (take_lock) {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
                      self);
        counts[index]++;
      } else {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      art_quick_unlock_object, self);
        counts[index]--;
      }

      EXPECT_FALSE(self->IsExceptionPending());

      // Check the new state.
      LockWord lock_iter = objects[index]->GetLockWord(true);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (fat[index]) {
        // Abuse MonitorInfo.
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
        MonitorInfo info(objects[index].Get());
        EXPECT_EQ(counts[index], info.entry_count_) << index;
      } else {
        if (counts[index] > 0) {
          EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
          EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
        } else {
          EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
        }
      }
    }
  }

  // Unlock the remaining count times and then check it's unlocked. Then deallocate.
  // Go in reverse order to correctly handle Handles.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    size_t index = kNumberOfLocks - 1 - i;
    size_t count = counts[index];
    while (count > 0) {
      test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
                    self);
      count--;
    }

    LockWord lock_after4 = objects[index]->GetLockWord(false);
    LockWord::LockState new_state4 = lock_after4.GetState();
    EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
                || LockWord::LockState::kFatLocked == new_state4);
  }

  // Test done.
#else
  UNUSED(test);
  LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, UnlockObject) {
  // This will lead to monitor error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  TestUnlockObject(this);
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_check_instance_of(void);
#endif

TEST_F(StubTest, CheckCast) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  const uintptr_t art_quick_check_instance_of =
      StubTest::GetEntrypoint(self, kQuickCheckInstanceOf);

  // Find some classes.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  VariableSizedHandleScope hs(soa.Self());
  Handle<mirror::Class> klass_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
  Handle<mirror::Class> klass_str(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/String;")));
  Handle<mirror::Class> klass_list(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/util/List;")));
  Handle<mirror::Class> klass_cloneable(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Cloneable;")));
  Handle<mirror::Class> klass_array_list(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/util/ArrayList;")));
  Handle<mirror::Object> obj(hs.NewHandle(klass_obj->AllocObject(soa.Self())));
  Handle<mirror::String> string(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "ABCD")));
  Handle<mirror::Object> array_list(hs.NewHandle(klass_array_list->AllocObject(soa.Self())));

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(obj.Get()),
          reinterpret_cast<size_t>(klass_obj.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  // Expected true: Test string instance of java.lang.String.
  Invoke3(reinterpret_cast<size_t>(string.Get()),
          reinterpret_cast<size_t>(klass_str.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  // Expected true: Test string instance of java.lang.Object.
  Invoke3(reinterpret_cast<size_t>(string.Get()),
          reinterpret_cast<size_t>(klass_obj.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  // Expected false: Test object instance of java.lang.String.
  Invoke3(reinterpret_cast<size_t>(obj.Get()),
          reinterpret_cast<size_t>(klass_str.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  Invoke3(reinterpret_cast<size_t>(array_list.Get()),
          reinterpret_cast<size_t>(klass_list.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(array_list.Get()),
          reinterpret_cast<size_t>(klass_cloneable.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(string.Get()),
          reinterpret_cast<size_t>(klass_array_list.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  Invoke3(reinterpret_cast<size_t>(string.Get()),
          reinterpret_cast<size_t>(klass_cloneable.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

#else
  LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, AllocObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectWithChecks),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K.
    size_t length = 128 * KB / 4;
    while (length > 10) {
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h == nullptr) {
        self->ClearException();

        // Try a smaller length.
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, AllocObjectArray) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  Thread* self = Thread::Current();
  // Create an object.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<1> hs(self);
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());

  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases).
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 10U,
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved32),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << mirror::Object::PrettyTypeOf(self->GetException());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory.
  {
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()),
                            GB,  // that should fail...
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved32),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


TEST_F(StubTest, StringCompareTo) {
  TEST_DISABLED_FOR_STRING_COMPRESSION();
  // There is no StringCompareTo runtime entrypoint for __arm__ or __aarch64__.
#if defined(__i386__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();

  const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);

  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings.
  // Use array so we can index into it and use a matrix for expected results.
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  const char* c[] = { "", "", "a", "aa", "ab",
      "aacaacaacaacaacaac",  // This one's under the default limit to go to __memcmp16.
      "aacaacaacaacaacaacaacaacaacaacaacaac",  // This one's over.
      "aacaacaacaacaacaacaacaacaacaacaacaaca" };  // As is this one. We need a separate one to
                                                  // defeat object-equal optimizations.
  static constexpr size_t kStringCount = arraysize(c);

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
  }

  // TODO: wide characters

  // Matrix of expectations. First component is first parameter. Note we only check against the
  // sign, not the value. As we are testing random offsets, we need to compute this and need to
  // rely on String::CompareTo being correct.
  int32_t expected[kStringCount][kStringCount];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      expected[x][y] = s[x]->CompareTo(s[y].Get());
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      // Test string_compareto x y.
      size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
                              reinterpret_cast<size_t>(s[y].Get()), 0U,
                              art_quick_string_compareto, self);

      EXPECT_FALSE(self->IsExceptionPending());

      // The result is a 32b signed integer.
      union {
        size_t r;
        int32_t i;
      } conv;
      conv.r = result;
      int32_t e = expected[x][y];
      EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
      std::endl;
#endif
}


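// Each GetSet* helper below round-trips a set of boundary values through the quick set/get
// entrypoints for one field width, checking that the value the stub stored is read back
// unchanged (with the expected zero- or sign-extension).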
static void GetSetBooleanStatic(ArtField* f, Thread* self,
                                ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  constexpr size_t num_values = 5;
  uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet8Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
                                           self,
                                           referrer);
    // Booleans are currently stored as uint8_t, so be zealous about asserting that writes and
    // reads round-trip exactly.
    EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}

static void GetSetByteStatic(ArtField* f, Thread* self, ArtMethod* referrer,
                             StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  int8_t values[] = { -128, -64, 0, 64, 127 };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet8Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetByteStatic),
                                           self,
                                           referrer);
    EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}

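// The *Instance variants below additionally pass the receiver object, and they cross-check the
// stub against the ArtField accessors (e.g. GetBoolean/SetBoolean), so a failure distinguishes
// a broken stub from a broken runtime field accessor.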
static void GetSetBooleanInstance(Handle<mirror::Object>* obj, ArtField* f, Thread* self,
                                  ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  uint8_t values[] = { 0, 1, 2, 128, 0xFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet8Instance),
                              self,
                              referrer);

    uint8_t res = f->GetBoolean(obj->Get());
    EXPECT_EQ(values[i], res) << "Iteration " << i;

    f->SetBoolean<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint8_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}

static void GetSetByteInstance(Handle<mirror::Object>* obj, ArtField* f,
                               Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  int8_t values[] = { -128, -64, 0, 64, 127 };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet8Instance),
                              self,
                              referrer);

    int8_t res = f->GetByte(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    f->SetByte<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetByteInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int8_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}

static void GetSetCharStatic(ArtField* f, Thread* self, ArtMethod* referrer,
                             StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet16Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetCharStatic),
                                           self,
                                           referrer);

    EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}

static void GetSetShortStatic(ArtField* f, Thread* self,
                              ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet16Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetShortStatic),
                                           self,
                                           referrer);

    EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}
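
// Char and short share the 16-bit set stubs (kQuickSet16Static/Instance); only the get stubs
// differ, zero-extending for char and sign-extending for short. That is why the expectations
// above and below cast through uint16_t and int16_t respectively.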
static void GetSetCharInstance(Handle<mirror::Object>* obj, ArtField* f,
                               Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    uint16_t res = f->GetChar(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    f->SetChar<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetCharInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint16_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}

static void GetSetShortInstance(Handle<mirror::Object>* obj, ArtField* f,
                                Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    int16_t res = f->GetShort(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    f->SetShort<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetShortInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int16_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}

static void GetSet32Static(ArtField* f, Thread* self, ArtMethod* referrer,
                           StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet32Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet32Static),
                                           self,
                                           referrer);

#if defined(__mips__) && defined(__LP64__)
    EXPECT_EQ(static_cast<uint32_t>(res), values[i]) << "Iteration " << i;
#else
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
#endif
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}


static void GetSet32Instance(Handle<mirror::Object>* obj, ArtField* f,
                             Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet32Instance),
                              self,
                              referrer);

    int32_t res = f->GetInt(obj->Get());
    EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;

    res++;
    f->SetInt<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGet32Instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int32_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}

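// The object-field helpers below store a reference through the stub, read it back through the
// stub, and also compare against ArtField::GetObj. The callers run the sequence
// nullptr -> string -> nullptr so both installing and clearing a reference are covered.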
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))

static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
                                 ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                            reinterpret_cast<size_t>(val),
                            0U,
                            StubTest::GetEntrypoint(self, kQuickSetObjStatic),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                                         0U, 0U,
                                         StubTest::GetEntrypoint(self, kQuickGetObjStatic),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
}
#endif

static void GetSetObjStatic(ArtField* f, Thread* self, ArtMethod* referrer,
                            StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_static(f->GetDexFieldIndex(), str, self, referrer, test);

  set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
static void set_and_check_instance(ArtField* f, mirror::Object* trg,
                                   mirror::Object* val, Thread* self, ArtMethod* referrer,
                                   StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                            reinterpret_cast<size_t>(trg),
                            reinterpret_cast<size_t>(val),
                            StubTest::GetEntrypoint(self, kQuickSetObjInstance),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                         reinterpret_cast<size_t>(trg),
                                         0U,
                                         StubTest::GetEntrypoint(self, kQuickGetObjInstance),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;

  EXPECT_OBJ_PTR_EQ(val, f->GetObj(trg));
}
#endif

static void GetSetObjInstance(Handle<mirror::Object>* obj, ArtField* f,
                              Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_instance(f, obj->Get(), str, self, referrer, test);

  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}

// TODO: Complete these tests for 32-bit architectures.

static void GetSet64Static(ArtField* f, Thread* self, ArtMethod* referrer,
                           StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) \
    || defined(__aarch64__)
  uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // The 64-bit FieldSet stub takes the set value in the second register.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              values[i],
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet64Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet64Static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}


static void GetSet64Instance(Handle<mirror::Object>* obj, ArtField* f,
                             Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) || \
    defined(__aarch64__)
  uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet64Instance),
                              self,
                              referrer);

    int64_t res = f->GetLong(obj->Get());
    EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;

    res++;
    f->SetLong<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGet64Instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int64_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}

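// TestFields loads the AllFields test class, allocates one instance, and walks the class's
// static and instance field lists, dispatching every field of the requested primitive type to
// the matching GetSet* helper above. The class's first direct method serves as the referrer.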
static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
  // Garbage is created during ClassLinker::Init.

  JNIEnv* env = Thread::Current()->GetJniEnv();
  jclass jc = env->FindClass("AllFields");
  CHECK(jc != nullptr);
  jobject o = env->AllocObject(jc);
  CHECK(o != nullptr);

  ScopedObjectAccess soa(self);
  StackHandleScope<3> hs(self);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object>(o)));
  Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
  // Need a method as a referrer.
  ArtMethod* m = c->GetDirectMethod(0, kRuntimePointerSize);

  // Play with it...

  // Static fields.
  for (ArtField& f : c->GetSFields()) {
    Primitive::Type type = f.GetTypeAsPrimitiveType();
    if (test_type != type) {
      continue;
    }
    switch (type) {
      case Primitive::Type::kPrimBoolean:
        GetSetBooleanStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimByte:
        GetSetByteStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimChar:
        GetSetCharStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimShort:
        GetSetShortStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimInt:
        GetSet32Static(&f, self, m, test);
        break;
      case Primitive::Type::kPrimLong:
        GetSet64Static(&f, self, m, test);
        break;
      case Primitive::Type::kPrimNot:
        // Don't try array.
        if (f.GetTypeDescriptor()[0] != '[') {
          GetSetObjStatic(&f, self, m, test);
        }
        break;
      default:
        break;  // Skip.
    }
  }

  // Instance fields.
  for (ArtField& f : c->GetIFields()) {
    Primitive::Type type = f.GetTypeAsPrimitiveType();
    if (test_type != type) {
      continue;
    }
    switch (type) {
      case Primitive::Type::kPrimBoolean:
        GetSetBooleanInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimByte:
        GetSetByteInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimChar:
        GetSetCharInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimShort:
        GetSetShortInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimInt:
        GetSet32Instance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimLong:
        GetSet64Instance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimNot:
        // Don't try array.
        if (f.GetTypeDescriptor()[0] != '[') {
          GetSetObjInstance(&obj, &f, self, m, test);
        }
        break;
      default:
        break;  // Skip.
    }
  }

  // TODO: Deallocate things.
}

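// Each Fields* test below transitions its thread to runnable, loads the AllFields dex file, and
// starts the runtime before exercising the stubs for one primitive width at a time.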
TEST_F(StubTest, Fields8) {
  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimBoolean);
  TestFields(self, this, Primitive::Type::kPrimByte);
}

TEST_F(StubTest, Fields16) {
  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimChar);
  TestFields(self, this, Primitive::Type::kPrimShort);
}

TEST_F(StubTest, Fields32) {
  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimInt);
}

TEST_F(StubTest, FieldsObj) {
  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimNot);
}

TEST_F(StubTest, Fields64) {
  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimLong);
}

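// The IMT test exercises two interface-dispatch paths: the conflict trampoline, fed a
// hand-built single-entry ImtConflictTable mapping List.contains to ArrayList.contains, and the
// plain interface trampoline with access check, which resolves the method from its dex index.
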
// Disabled, b/27991555.
// FIXME: Hacking the entry point to point to art_quick_to_interpreter_bridge is broken.
// The bridge calls through to GetCalleeSaveMethodCaller() which looks up the pre-header
// and gets a bogus OatQuickMethodHeader* pointing into our assembly code just before
// the bridge and uses that to check for inlined frames, crashing in the process.
TEST_F(StubTest, DISABLED_IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod = env->GetMethodID(
      arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get the ArtMethod.
  ArtMethod* contains_amethod = jni::DecodeArtMethod(contains_jmethod);

  // Patch up ArrayList.contains.
  if (contains_amethod->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
  }

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod = env->GetMethodID(
      list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get the ArtMethod.
  ArtMethod* inf_contains = jni::DecodeArtMethod(inf_contains_jmethod);

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Create instances.

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object>(jobj)));

  // Invocation tests.

  // 1. imt_conflict

  // Contains.

  // We construct the ImtConflictTable ourselves, as we cannot go into the runtime stub
  // that will create it: the runtime stub expects to be called by compiled code.
  LinearAlloc* linear_alloc = Runtime::Current()->GetLinearAlloc();
  ArtMethod* conflict_method = Runtime::Current()->CreateImtConflictMethod(linear_alloc);
  ImtConflictTable* empty_conflict_table =
      Runtime::Current()->GetClassLinker()->CreateImtConflictTable(/*count*/0u, linear_alloc);
  void* data = linear_alloc->Alloc(
      self,
      ImtConflictTable::ComputeSizeWithOneMoreEntry(empty_conflict_table, kRuntimePointerSize));
  ImtConflictTable* new_table = new (data) ImtConflictTable(
      empty_conflict_table, inf_contains, contains_amethod, kRuntimePointerSize);
  conflict_method->SetImtConflictTable(new_table, kRuntimePointerSize);

  size_t result =
      Invoke3WithReferrerAndHidden(reinterpret_cast<size_t>(conflict_method),
                                   reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self,
                                   contains_amethod,
                                   static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << mirror::Object::PrettyTypeOf(self->GetException());

  // Contains.

  result =
      Invoke3WithReferrerAndHidden(reinterpret_cast<size_t>(conflict_method),
                                   reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self,
                                   contains_amethod,
                                   static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // 2. Regular interface trampoline.

  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(obj.Get()),
                               StubTest::GetEntrypoint(
                                   self, kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  result = Invoke3WithReferrer(
      static_cast<size_t>(inf_contains->GetDexMethodIndex()),
      reinterpret_cast<size_t>(array_list.Get()), reinterpret_cast<size_t>(array_list.Get()),
      StubTest::GetEntrypoint(self, kQuickInvokeInterfaceTrampolineWithAccessCheck), self,
      contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

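// The IndexOf test sweeps the start position from -1 to kMaxLen + 1, so the stub's handling of
// out-of-range start values is covered as well; expected results come from String::FastIndexOf
// on the same inputs.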
TEST_F(StubTest, StringIndexOf) {
#if defined(__arm__) || defined(__aarch64__) || defined(__mips__)
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  // Garbage is created during ClassLinker::Init.

  // Create some strings.
  // Use an array so we can index into it and use a matrix for expected results.
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  const char* c_str[] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
  static constexpr size_t kStringCount = arraysize(c_str);
  const char c_char[] = { 'a', 'b', 'c', 'd', 'e' };
  static constexpr size_t kCharCount = arraysize(c_char);

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
  }

  // Matrix of expectations. The first component is the string, the second the character. We
  // compute the expected values with String::FastIndexOf and rely on it being correct.
  static constexpr size_t kMaxLen = 9;
  DCHECK_LE(strlen(c_str[kStringCount - 1]), kMaxLen) << "Please fix the indexof test.";

  // Last dimension: start, offset by 1.
  int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
      }
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        int32_t start = static_cast<int32_t>(z) - 1;

        // Test indexof(s[x], c_char[y], start).
        size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
                                StubTest::GetEntrypoint(self, kQuickIndexOf), self);

        EXPECT_FALSE(self->IsExceptionPending());

        // The result is a 32-bit signed integer.
        union {
          size_t r;
          int32_t i;
        } conv;
        conv.r = result;

        EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / "
            << c_char[y] << " @ " << start;
      }
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

// TODO: Exercise the ReadBarrierMarkRegX entry points.

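// A sketch of the slow-path contract this test relies on: the entrypoint takes
// (reference, object, offset), performs the read barrier for the field at that offset, and
// returns the resulting reference. Loading Object::ClassOffset() through it must therefore
// agree with obj->GetClass().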
TEST_F(StubTest, ReadBarrier) {
#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
    defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
  Thread* self = Thread::Current();

  const uintptr_t readBarrierSlow = StubTest::GetEntrypoint(self, kQuickReadBarrierSlow);

  // Create an object.
  ScopedObjectAccess soa(self);
  // Garbage is created during ClassLinker::Init.

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Build an object instance.
  Handle<mirror::Object> obj(hs.NewHandle(c->AllocObject(soa.Self())));

  EXPECT_FALSE(self->IsExceptionPending());

  size_t result = Invoke3(0U, reinterpret_cast<size_t>(obj.Get()),
                          mirror::Object::ClassOffset().SizeValue(), readBarrierSlow, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
  mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
  EXPECT_EQ(klass, obj->GetClass());

  // Tests done.
#else
  LOG(INFO) << "Skipping read_barrier_slow";
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping read_barrier_slow" << std::endl;
#endif
}

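// Same idea for GC roots: the ForRoot slow path takes a GcRoot<>* rather than an object/offset
// pair, and returns the root's reference after the barrier has run.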
TEST_F(StubTest, ReadBarrierForRoot) {
#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
    defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
  Thread* self = Thread::Current();

  const uintptr_t readBarrierForRootSlow =
      StubTest::GetEntrypoint(self, kQuickReadBarrierForRootSlow);

  // Create an object.
  ScopedObjectAccess soa(self);
  // Garbage is created during ClassLinker::Init.

  StackHandleScope<1> hs(soa.Self());

  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  EXPECT_FALSE(self->IsExceptionPending());

  GcRoot<mirror::Class> root(GetClassRoot<mirror::String>());
  size_t result = Invoke3(reinterpret_cast<size_t>(&root), 0U, 0U, readBarrierForRootSlow, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
  mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
  EXPECT_EQ(klass, obj->GetClass());

  // Tests done.
#else
  LOG(INFO) << "Skipping read_barrier_for_root_slow";
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping read_barrier_for_root_slow" << std::endl;
#endif
}

}  // namespace art