blob: 8746badf197c0f9a1f5b7774ebc821d787889182 [file] [log] [blame]
Andreas Gampe525cde22014-04-22 15:44:50 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Ian Rogerse63db272014-07-15 15:36:11 -070017#include <cstdio>
18
Mathieu Chartierc7853442015-03-27 14:35:38 -070019#include "art_field-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070020#include "art_method-inl.h"
Vladimir Marko3481ba22015-04-13 12:22:36 +010021#include "class_linker-inl.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070022#include "common_runtime_test.h"
Andreas Gampe29b38412014-08-13 00:15:43 -070023#include "entrypoints/quick/quick_entrypoints_enum.h"
Andreas Gampe51f76352014-05-21 08:28:48 -070024#include "mirror/class-inl.h"
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -070025#include "mirror/string-inl.h"
Ian Rogerse63db272014-07-15 15:36:11 -070026#include "scoped_thread_state_change.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070027
28namespace art {
29
30
// Test fixture for exercising the quick-code entrypoint stubs directly.
// It installs callee-save methods into the Runtime (required so exceptions
// thrown inside a stub can be delivered) and provides per-architecture
// assembly trampolines that invoke a stub with up to three word-sized
// arguments, an optional referrer method and an optional "hidden" argument.
class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods for every CalleeSaveType that does not
      // have one yet; this needs the mutator lock, hence ScopedObjectAccess.
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(), type);
        }
      }
    }
  }

  // Shrink the heap ("-Xmx4M") and force the interpreter ("-Xint") so the
  // tests run in a small, compiler-free configuration.
  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  // (Subclasses produced by TEST_F are not friends of Thread, so expose the
  // thread-local pointer block through the fixture.)
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  // Invoke the stub at `code` with three arguments and no referrer.
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // Invoke the stub with an explicit referrer method and hidden argument 0.
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             ArtMethod* referrer) {
    return Invoke3WithReferrerAndHidden(arg0, arg1, arg2, code, self, referrer, 0);
  }

  // Core trampoline: pushes a managed-stack transition fragment, then uses
  // per-architecture inline assembly to
  //   - spill registers the compiler assumes are preserved,
  //   - place `referrer` where a quick frame expects the calling method,
  //   - load arg0-arg2 / code / self / hidden into the quick-ABI registers,
  //   - call the stub and collect the result register.
  // On arm64 it additionally seeds the callee-save FP registers d8-d15 with
  // known patterns and verifies them after the call (flag lands in
  // fpr_result, checked to be 0 below).
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
#define PUSH(reg) "push " # reg "\n\t .cfi_adjust_cfa_offset 4\n\t"
#define POP(reg) "pop " # reg "\n\t .cfi_adjust_cfa_offset -4\n\t"
    __asm__ __volatile__(
        "movd %[hidden], %%xmm7\n\t"  // This is a memory op, so do this early. If it is off of
                                      // esp, then we won't be able to access it after spilling.

        // Spill 6 registers.
        PUSH(%%ebx)
        PUSH(%%ecx)
        PUSH(%%edx)
        PUSH(%%esi)
        PUSH(%%edi)
        PUSH(%%ebp)

        // Store the inputs to the stack, but keep the referrer up top, less work.
        PUSH(%[referrer])  // Align stack.
        PUSH(%[referrer])  // Store referrer

        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[code])
        // Now read them back into the required registers.
        POP(%%edi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%eax)
        // Call is prepared now.

        "call *%%edi\n\t"           // Call the stub
        "addl $8, %%esp\n\t"        // Pop referrer and padding.
        ".cfi_adjust_cfa_offset -8\n\t"

        // Restore 6 registers.
        POP(%%ebp)
        POP(%%edi)
        POP(%%esi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%ebx)

        : "=a" (result)
        // Use the result from eax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer]"r"(referrer), [hidden]"m"(hidden)
        // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory", "xmm7");  // clobber.
#undef PUSH
#undef POP
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"   // NOTE(review): "\n\n" looks like a typo for "\n\t" —
                                    // harmless to the assembler, but confirm intent.
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop null and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
        // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "r0", "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"    // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x19, x17, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"              // Call the stub
        "mov x8, x0\n\t"          // Store result
        "add sp, sp, #16\n\t"     // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"          // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"    // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"     // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t"  // Store the FPR comparison result
        "mov %[result], x8\n\t"      // Store the call result

        "b 3f\n\t"                // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
        // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__mips__) && !defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a3 and t0-t7 which we say we don't clobber. May contain args.
        "addiu $sp, $sp, -64\n\t"
        "sw $a0, 0($sp)\n\t"
        "sw $a1, 4($sp)\n\t"
        "sw $a2, 8($sp)\n\t"
        "sw $a3, 12($sp)\n\t"
        "sw $t0, 16($sp)\n\t"
        "sw $t1, 20($sp)\n\t"
        "sw $t2, 24($sp)\n\t"
        "sw $t3, 28($sp)\n\t"
        "sw $t4, 32($sp)\n\t"
        "sw $t5, 36($sp)\n\t"
        "sw $t6, 40($sp)\n\t"
        "sw $t7, 44($sp)\n\t"
        // Spill gp register since it is caller save.
        "sw $gp, 52($sp)\n\t"

        "addiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sw %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "addiu $sp, $sp, -24\n\t"
        "sw %[arg0], 0($sp)\n\t"
        "sw %[arg1], 4($sp)\n\t"
        "sw %[arg2], 8($sp)\n\t"
        "sw %[code], 12($sp)\n\t"
        "sw %[self], 16($sp)\n\t"
        "sw %[hidden], 20($sp)\n\t"

        // Load call params into the right registers.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $t9, 12($sp)\n\t"
        "lw $s1, 16($sp)\n\t"
        "lw $t0, 20($sp)\n\t"
        "addiu $sp, $sp, 24\n\t"

        "jalr $t9\n\t"             // Call the stub.
        "nop\n\t"                  // Branch delay slot.
        "addiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $a3, 12($sp)\n\t"
        "lw $t0, 16($sp)\n\t"
        "lw $t1, 20($sp)\n\t"
        "lw $t2, 24($sp)\n\t"
        "lw $t3, 28($sp)\n\t"
        "lw $t4, 32($sp)\n\t"
        "lw $t5, 36($sp)\n\t"
        "lw $t6, 40($sp)\n\t"
        "lw $t7, 44($sp)\n\t"
        // Restore gp.
        "lw $gp, 52($sp)\n\t"
        "addiu $sp, $sp, 64\n\t"   // Free stack space, now sp as on entry.

        "move %[result], $v0\n\t"  // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "at", "v0", "v1", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "t8", "t9", "k0", "k1",
          "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__mips__) && defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a7 which we say we don't clobber. May contain args.
        "daddiu $sp, $sp, -64\n\t"
        "sd $a0, 0($sp)\n\t"
        "sd $a1, 8($sp)\n\t"
        "sd $a2, 16($sp)\n\t"
        "sd $a3, 24($sp)\n\t"
        "sd $a4, 32($sp)\n\t"
        "sd $a5, 40($sp)\n\t"
        "sd $a6, 48($sp)\n\t"
        "sd $a7, 56($sp)\n\t"

        "daddiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sd %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "daddiu $sp, $sp, -48\n\t"
        "sd %[arg0], 0($sp)\n\t"
        "sd %[arg1], 8($sp)\n\t"
        "sd %[arg2], 16($sp)\n\t"
        "sd %[code], 24($sp)\n\t"
        "sd %[self], 32($sp)\n\t"
        "sd %[hidden], 40($sp)\n\t"

        // Load call params into the right registers.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $t9, 24($sp)\n\t"
        "ld $s1, 32($sp)\n\t"
        "ld $t0, 40($sp)\n\t"
        "daddiu $sp, $sp, 48\n\t"

        "jalr $t9\n\t"              // Call the stub.
        "nop\n\t"                   // Branch delay slot.
        "daddiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $a3, 24($sp)\n\t"
        "ld $a4, 32($sp)\n\t"
        "ld $a5, 40($sp)\n\t"
        "ld $a6, 48($sp)\n\t"
        "ld $a7, 56($sp)\n\t"
        "daddiu $sp, $sp, 64\n\t"

        "move %[result], $v0\n\t"   // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "at", "v0", "v1", "t0", "t1", "t2", "t3", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
          "t8", "t9", "k0", "k1", "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__)
#define PUSH(reg) "pushq " # reg "\n\t .cfi_adjust_cfa_offset 8\n\t"
#define POP(reg) "popq " # reg "\n\t .cfi_adjust_cfa_offset -8\n\t"
    // Note: Uses the native convention. We do a callee-save regimen by manually spilling and
    // restoring almost all registers.
    // TODO: Set the thread?
    __asm__ __volatile__(
        // Spill almost everything (except rax, rsp). 14 registers.
        PUSH(%%rbx)
        PUSH(%%rcx)
        PUSH(%%rdx)
        PUSH(%%rsi)
        PUSH(%%rdi)
        PUSH(%%rbp)
        PUSH(%%r8)
        PUSH(%%r9)
        PUSH(%%r10)
        PUSH(%%r11)
        PUSH(%%r12)
        PUSH(%%r13)
        PUSH(%%r14)
        PUSH(%%r15)

        PUSH(%[referrer])           // Push referrer & 16B alignment padding
        PUSH(%[referrer])

        // Now juggle the input registers.
        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[hidden])
        PUSH(%[code])
        POP(%%r8)
        POP(%%rax)
        POP(%%rdx)
        POP(%%rsi)
        POP(%%rdi)

        "call *%%r8\n\t"       // Call the stub
        "addq $16, %%rsp\n\t"  // Pop null and padding
        ".cfi_adjust_cfa_offset -16\n\t"

        POP(%%r15)
        POP(%%r14)
        POP(%%r13)
        POP(%%r12)
        POP(%%r11)
        POP(%%r10)
        POP(%%r9)
        POP(%%r8)
        POP(%%rbp)
        POP(%%rdi)
        POP(%%rsi)
        POP(%%rdx)
        POP(%%rcx)
        POP(%%rbx)

        : "=a" (result)
        // Use the result from rax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into some other
        // register. We can't use "b" (rbx), as ASAN uses this for the frame pointer.
        : "memory");  // We spill and restore (almost) all registers, so only mention memory here.
#undef PUSH
#undef POP
#else
    UNUSED(arg0, arg1, arg2, code, referrer, hidden);
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    // Publish the (arm64-only) FP callee-save verification flag and require
    // that the stub preserved d8-d15; on other architectures this stays 0.
    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Read the entrypoint pointer for `entrypoint` straight out of the thread's
  // entrypoint table, using the 8-byte offsets on LP64 and 4-byte otherwise.
  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
    int32_t offset;
#ifdef __LP64__
    offset = GetThreadOffset<8>(entrypoint).Int32Value();
#else
    offset = GetThreadOffset<4>(entrypoint).Int32Value();
#endif
    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
  }

 protected:
  // Set by Invoke3WithReferrerAndHidden: non-zero iff the arm64 path detected
  // a clobbered callee-save FP register (d8-d15) after the stub call.
  size_t fp_result;
};
531
532
Andreas Gampe525cde22014-04-22 15:44:50 -0700533TEST_F(StubTest, Memcpy) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200534#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__)) || defined(__mips__)
Andreas Gampe525cde22014-04-22 15:44:50 -0700535 Thread* self = Thread::Current();
536
537 uint32_t orig[20];
538 uint32_t trg[20];
539 for (size_t i = 0; i < 20; ++i) {
540 orig[i] = i;
541 trg[i] = 0;
542 }
543
544 Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
Andreas Gampe29b38412014-08-13 00:15:43 -0700545 10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700546
547 EXPECT_EQ(orig[0], trg[0]);
548
549 for (size_t i = 1; i < 4; ++i) {
550 EXPECT_NE(orig[i], trg[i]);
551 }
552
553 for (size_t i = 4; i < 14; ++i) {
554 EXPECT_EQ(orig[i], trg[i]);
555 }
556
557 for (size_t i = 14; i < 20; ++i) {
558 EXPECT_NE(orig[i], trg[i]);
559 }
560
561 // TODO: Test overlapping?
562
563#else
564 LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
565 // Force-print to std::cout so it's also outside the logcat.
566 std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
567#endif
568}
569
// Exercise the kQuickLockObject stub: verify the unlocked -> thin-locked
// transition, thin-lock recursion counting, and inflation to a fat lock when
// the lock word already holds an identity hashcode.
TEST_F(StubTest, LockObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();

  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  // A freshly allocated object must start out unlocked.
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  // First lock through the stub: expect a thin lock with count 0.
  Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
  EXPECT_EQ(lock_after.ThinLockCount(), 0U);  // Thin lock starts count at zero

  // Re-lock repeatedly; the thin-lock recursion count must track i.
  for (size_t i = 1; i < kThinLockLoops; ++i) {
    Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

    // Check we're at lock count i

    LockWord l_inc = obj->GetLockWord(false);
    LockWord::LockState l_inc_state = l_inc.GetState();
    EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
    EXPECT_EQ(l_inc.ThinLockCount(), i);
  }

  // Force a fat lock by running identity hashcode to fill up lock word.
  Handle<mirror::String> obj2(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  obj2->IdentityHashCode();

  // Locking an object whose lock word holds a hashcode must inflate to a
  // fat lock backed by a Monitor.
  Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after2 = obj2->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
  EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));

  // Test done.
#else
  LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
628
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700629
// Small deterministic pseudo-random generator (multiplicative step with an
// additive tweak) used to drive the lock/unlock stress test. Note that the
// multiply is performed in uint32_t, so it wraps mod 2^32 before the modulus
// is taken; the sequence is deterministic either way.
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  // Advance the state and return the new value.
  uint32_t next() {
    const uint32_t scaled = val_ * 48271;  // Wraps mod 2^32 (unsigned arithmetic).
    val_ = scaled % 2147483647 + 13;
    return val_;
  }

  uint32_t val_;
};
641
642
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700643// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
644static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200645#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
646 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700647 static constexpr size_t kThinLockLoops = 100;
648
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700649 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700650
651 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
652 const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700653 // Create an object
654 ScopedObjectAccess soa(self);
655 // garbage is created during ClassLinker::Init
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700656 static constexpr size_t kNumberOfLocks = 10; // Number of objects = lock
657 StackHandleScope<kNumberOfLocks + 1> hs(self);
658 Handle<mirror::String> obj(
659 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700660 LockWord lock = obj->GetLockWord(false);
661 LockWord::LockState old_state = lock.GetState();
662 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
663
Andreas Gampe29b38412014-08-13 00:15:43 -0700664 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700665 // This should be an illegal monitor state.
666 EXPECT_TRUE(self->IsExceptionPending());
667 self->ClearException();
668
669 LockWord lock_after = obj->GetLockWord(false);
670 LockWord::LockState new_state = lock_after.GetState();
671 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);
Andreas Gampe525cde22014-04-22 15:44:50 -0700672
Andreas Gampe29b38412014-08-13 00:15:43 -0700673 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700674
675 LockWord lock_after2 = obj->GetLockWord(false);
676 LockWord::LockState new_state2 = lock_after2.GetState();
677 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);
678
Andreas Gampe29b38412014-08-13 00:15:43 -0700679 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700680
681 LockWord lock_after3 = obj->GetLockWord(false);
682 LockWord::LockState new_state3 = lock_after3.GetState();
683 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);
684
685 // Stress test:
686 // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
687 // each step.
688
689 RandGen r(0x1234);
690
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700691 constexpr size_t kIterations = 10000; // Number of iterations
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700692 constexpr size_t kMoveToFat = 1000; // Chance of 1:kMoveFat to make a lock fat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700693
694 size_t counts[kNumberOfLocks];
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700695 bool fat[kNumberOfLocks]; // Whether a lock should be thin or fat.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700696 Handle<mirror::String> objects[kNumberOfLocks];
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700697
698 // Initialize = allocate.
699 for (size_t i = 0; i < kNumberOfLocks; ++i) {
700 counts[i] = 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700701 fat[i] = false;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700702 objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700703 }
704
705 for (size_t i = 0; i < kIterations; ++i) {
706 // Select which lock to update.
707 size_t index = r.next() % kNumberOfLocks;
708
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700709 // Make lock fat?
710 if (!fat[index] && (r.next() % kMoveToFat == 0)) {
711 fat[index] = true;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700712 objects[index]->IdentityHashCode();
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700713
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700714 LockWord lock_iter = objects[index]->GetLockWord(false);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700715 LockWord::LockState iter_state = lock_iter.GetState();
716 if (counts[index] == 0) {
717 EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
718 } else {
719 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
720 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700721 } else {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800722 bool take_lock; // Whether to lock or unlock in this step.
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700723 if (counts[index] == 0) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800724 take_lock = true;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700725 } else if (counts[index] == kThinLockLoops) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800726 take_lock = false;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700727 } else {
728 // Randomly.
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800729 take_lock = r.next() % 2 == 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700730 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700731
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800732 if (take_lock) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700733 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
734 self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700735 counts[index]++;
736 } else {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700737 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700738 art_quick_unlock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700739 counts[index]--;
740 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700741
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700742 EXPECT_FALSE(self->IsExceptionPending());
743
744 // Check the new state.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700745 LockWord lock_iter = objects[index]->GetLockWord(true);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700746 LockWord::LockState iter_state = lock_iter.GetState();
747 if (fat[index]) {
748 // Abuse MonitorInfo.
749 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700750 MonitorInfo info(objects[index].Get());
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700751 EXPECT_EQ(counts[index], info.entry_count_) << index;
752 } else {
753 if (counts[index] > 0) {
754 EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
755 EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
756 } else {
757 EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
758 }
759 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700760 }
761 }
762
763 // Unlock the remaining count times and then check it's unlocked. Then deallocate.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700764 // Go reverse order to correctly handle Handles.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700765 for (size_t i = 0; i < kNumberOfLocks; ++i) {
766 size_t index = kNumberOfLocks - 1 - i;
767 size_t count = counts[index];
768 while (count > 0) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700769 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
770 self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700771 count--;
772 }
773
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700774 LockWord lock_after4 = objects[index]->GetLockWord(false);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700775 LockWord::LockState new_state4 = lock_after4.GetState();
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700776 EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
777 || LockWord::LockState::kFatLocked == new_state4);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700778 }
779
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700780 // Test done.
Andreas Gampe525cde22014-04-22 15:44:50 -0700781#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -0800782 UNUSED(test);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700783 LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
Andreas Gampe525cde22014-04-22 15:44:50 -0700784 // Force-print to std::cout so it's also outside the logcat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700785 std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe525cde22014-04-22 15:44:50 -0700786#endif
787}
788
// Driver for the unlock-object stub exercise implemented in TestUnlockObject()
// (defined earlier in this file).
TEST_F(StubTest, UnlockObject) {
  // This will lead to monitor error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  TestUnlockObject(this);
}
Andreas Gampe525cde22014-04-22 15:44:50 -0700795
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
// NOTE(review): this declaration appears unused — the CheckCast test below shadows the
// name with a local uintptr_t obtained via StubTest::GetEntrypoint. Confirm before removing.
extern "C" void art_quick_check_cast(void);
#endif
800
801TEST_F(StubTest, CheckCast) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200802#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
803 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700804 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700805
806 const uintptr_t art_quick_check_cast = StubTest::GetEntrypoint(self, kQuickCheckCast);
807
Andreas Gampe525cde22014-04-22 15:44:50 -0700808 // Find some classes.
809 ScopedObjectAccess soa(self);
810 // garbage is created during ClassLinker::Init
811
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700812 StackHandleScope<2> hs(soa.Self());
813 Handle<mirror::Class> c(
814 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
815 Handle<mirror::Class> c2(
816 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700817
818 EXPECT_FALSE(self->IsExceptionPending());
819
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700820 Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700821 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700822
823 EXPECT_FALSE(self->IsExceptionPending());
824
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700825 Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700826 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700827
828 EXPECT_FALSE(self->IsExceptionPending());
829
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700830 Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700831 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700832
833 EXPECT_FALSE(self->IsExceptionPending());
834
835 // TODO: Make the following work. But that would require correct managed frames.
836
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700837 Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700838 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700839
840 EXPECT_TRUE(self->IsExceptionPending());
841 self->ClearException();
842
843#else
844 LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
845 // Force-print to std::cout so it's also outside the logcat.
846 std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
847#endif
848}
849
850
Andreas Gampe525cde22014-04-22 15:44:50 -0700851TEST_F(StubTest, APutObj) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200852#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
853 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700854 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700855
856 // Do not check non-checked ones, we'd need handlers and stuff...
857 const uintptr_t art_quick_aput_obj_with_null_and_bound_check =
858 StubTest::GetEntrypoint(self, kQuickAputObjectWithNullAndBoundCheck);
859
Andreas Gampe525cde22014-04-22 15:44:50 -0700860 // Create an object
861 ScopedObjectAccess soa(self);
862 // garbage is created during ClassLinker::Init
863
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700864 StackHandleScope<5> hs(soa.Self());
865 Handle<mirror::Class> c(
866 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
867 Handle<mirror::Class> ca(
868 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700869
870 // Build a string array of size 1
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700871 Handle<mirror::ObjectArray<mirror::Object>> array(
872 hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));
Andreas Gampe525cde22014-04-22 15:44:50 -0700873
874 // Build a string -> should be assignable
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700875 Handle<mirror::String> str_obj(
876 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700877
878 // Build a generic object -> should fail assigning
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700879 Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));
Andreas Gampe525cde22014-04-22 15:44:50 -0700880
881 // Play with it...
882
883 // 1) Success cases
Andreas Gampef4e910b2014-04-29 16:55:52 -0700884 // 1.1) Assign str_obj to array[0..3]
Andreas Gampe525cde22014-04-22 15:44:50 -0700885
886 EXPECT_FALSE(self->IsExceptionPending());
887
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700888 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700889 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700890
891 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700892 EXPECT_EQ(str_obj.Get(), array->Get(0));
Andreas Gampe525cde22014-04-22 15:44:50 -0700893
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700894 Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700895 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700896
897 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700898 EXPECT_EQ(str_obj.Get(), array->Get(1));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700899
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700900 Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700901 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700902
903 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700904 EXPECT_EQ(str_obj.Get(), array->Get(2));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700905
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700906 Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700907 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700908
909 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700910 EXPECT_EQ(str_obj.Get(), array->Get(3));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700911
912 // 1.2) Assign null to array[0..3]
Andreas Gampe525cde22014-04-22 15:44:50 -0700913
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700914 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700915 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700916
917 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampef4e910b2014-04-29 16:55:52 -0700918 EXPECT_EQ(nullptr, array->Get(0));
919
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700920 Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700921 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700922
923 EXPECT_FALSE(self->IsExceptionPending());
924 EXPECT_EQ(nullptr, array->Get(1));
925
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700926 Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700927 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700928
929 EXPECT_FALSE(self->IsExceptionPending());
930 EXPECT_EQ(nullptr, array->Get(2));
931
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700932 Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700933 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700934
935 EXPECT_FALSE(self->IsExceptionPending());
936 EXPECT_EQ(nullptr, array->Get(3));
Andreas Gampe525cde22014-04-22 15:44:50 -0700937
938 // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.
939
940 // 2) Failure cases (str into str[])
941 // 2.1) Array = null
942 // TODO: Throwing NPE needs actual DEX code
943
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700944// Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe525cde22014-04-22 15:44:50 -0700945// reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
946//
947// EXPECT_TRUE(self->IsExceptionPending());
948// self->ClearException();
949
950 // 2.2) Index < 0
951
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700952 Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
953 reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700954 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700955
956 EXPECT_TRUE(self->IsExceptionPending());
957 self->ClearException();
958
959 // 2.3) Index > 0
960
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700961 Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700962 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700963
964 EXPECT_TRUE(self->IsExceptionPending());
965 self->ClearException();
966
967 // 3) Failure cases (obj into str[])
968
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700969 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700970 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700971
972 EXPECT_TRUE(self->IsExceptionPending());
973 self->ClearException();
974
975 // Tests done.
976#else
977 LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
978 // Force-print to std::cout so it's also outside the logcat.
979 std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
980#endif
981}
982
Andreas Gampe00c1e6d2014-04-25 15:47:13 -0700983TEST_F(StubTest, AllocObject) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200984#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
985 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe369810a2015-01-14 19:53:31 -0800986 // This will lead to OOM error messages in the log.
987 ScopedLogSeverity sls(LogSeverity::FATAL);
988
Andreas Gampe00c1e6d2014-04-25 15:47:13 -0700989 // TODO: Check the "Unresolved" allocation stubs
990
991 Thread* self = Thread::Current();
992 // Create an object
993 ScopedObjectAccess soa(self);
994 // garbage is created during ClassLinker::Init
995
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700996 StackHandleScope<2> hs(soa.Self());
997 Handle<mirror::Class> c(
998 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -0700999
1000 // Play with it...
1001
1002 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001003 {
1004 // Use an arbitrary method from c to use as referrer
1005 size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()), // type_idx
Mathieu Chartiere401d142015-04-22 13:56:20 -07001006 // arbitrary
1007 reinterpret_cast<size_t>(c->GetVirtualMethod(0, sizeof(void*))),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001008 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001009 StubTest::GetEntrypoint(self, kQuickAllocObject),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001010 self);
1011
1012 EXPECT_FALSE(self->IsExceptionPending());
1013 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1014 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001015 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001016 VerifyObject(obj);
1017 }
1018
1019 {
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001020 // We can use null in the second argument as we do not need a method here (not used in
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001021 // resolved/initialized cases)
Mathieu Chartiere401d142015-04-22 13:56:20 -07001022 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001023 StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001024 self);
1025
1026 EXPECT_FALSE(self->IsExceptionPending());
1027 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1028 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001029 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001030 VerifyObject(obj);
1031 }
1032
1033 {
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001034 // We can use null in the second argument as we do not need a method here (not used in
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001035 // resolved/initialized cases)
Mathieu Chartiere401d142015-04-22 13:56:20 -07001036 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001037 StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001038 self);
1039
1040 EXPECT_FALSE(self->IsExceptionPending());
1041 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1042 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001043 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001044 VerifyObject(obj);
1045 }
1046
1047 // Failure tests.
1048
1049 // Out-of-memory.
1050 {
1051 Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);
1052
1053 // Array helps to fill memory faster.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001054 Handle<mirror::Class> ca(
1055 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
1056
1057 // Use arbitrary large amount for now.
1058 static const size_t kMaxHandles = 1000000;
Ian Rogers700a4022014-05-19 16:49:03 -07001059 std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001060
1061 std::vector<Handle<mirror::Object>> handles;
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001062 // Start allocating with 128K
1063 size_t length = 128 * KB / 4;
1064 while (length > 10) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001065 Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
1066 mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
1067 if (self->IsExceptionPending() || h.Get() == nullptr) {
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001068 self->ClearException();
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001069
1070 // Try a smaller length
1071 length = length / 8;
1072 // Use at most half the reported free space.
1073 size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
1074 if (length * 8 > mem) {
1075 length = mem / 8;
1076 }
1077 } else {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001078 handles.push_back(h);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001079 }
1080 }
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001081 LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001082
1083 // Allocate simple objects till it fails.
1084 while (!self->IsExceptionPending()) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001085 Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
1086 if (!self->IsExceptionPending() && h.Get() != nullptr) {
1087 handles.push_back(h);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001088 }
1089 }
1090 self->ClearException();
1091
Mathieu Chartiere401d142015-04-22 13:56:20 -07001092 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001093 StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001094 self);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001095 EXPECT_TRUE(self->IsExceptionPending());
1096 self->ClearException();
1097 EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001098 }
1099
1100 // Tests done.
1101#else
1102 LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
1103 // Force-print to std::cout so it's also outside the logcat.
1104 std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
1105#endif
1106}
1107
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001108TEST_F(StubTest, AllocObjectArray) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001109#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1110 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001111 // TODO: Check the "Unresolved" allocation stubs
1112
Andreas Gampe369810a2015-01-14 19:53:31 -08001113 // This will lead to OOM error messages in the log.
1114 ScopedLogSeverity sls(LogSeverity::FATAL);
1115
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001116 Thread* self = Thread::Current();
1117 // Create an object
1118 ScopedObjectAccess soa(self);
1119 // garbage is created during ClassLinker::Init
1120
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001121 StackHandleScope<2> hs(self);
1122 Handle<mirror::Class> c(
1123 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001124
1125 // Needed to have a linked method.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001126 Handle<mirror::Class> c_obj(
1127 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001128
1129 // Play with it...
1130
1131 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001132
1133 // For some reason this does not work, as the type_idx is artificial and outside what the
1134 // resolved types of c_obj allow...
1135
Ian Rogerscf7f1912014-10-22 22:06:39 -07001136 if ((false)) {
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001137 // Use an arbitrary method from c to use as referrer
1138 size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()), // type_idx
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001139 10U,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001140 // arbitrary
1141 reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0, sizeof(void*))),
Andreas Gampe29b38412014-08-13 00:15:43 -07001142 StubTest::GetEntrypoint(self, kQuickAllocArray),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001143 self);
1144
1145 EXPECT_FALSE(self->IsExceptionPending());
1146 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1147 mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001148 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001149 VerifyObject(obj);
1150 EXPECT_EQ(obj->GetLength(), 10);
1151 }
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001152
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001153 {
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001154 // We can use null in the second argument as we do not need a method here (not used in
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001155 // resolved/initialized cases)
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001156 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 10U,
1157 reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -07001158 StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001159 self);
Nicolas Geoffray14691c52015-03-05 10:40:17 +00001160 EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001161 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1162 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
1163 EXPECT_TRUE(obj->IsArrayInstance());
1164 EXPECT_TRUE(obj->IsObjectArray());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001165 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001166 VerifyObject(obj);
1167 mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
1168 EXPECT_EQ(array->GetLength(), 10);
1169 }
1170
1171 // Failure tests.
1172
1173 // Out-of-memory.
1174 {
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001175 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001176 GB, // that should fail...
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001177 reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -07001178 StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001179 self);
1180
1181 EXPECT_TRUE(self->IsExceptionPending());
1182 self->ClearException();
1183 EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
1184 }
1185
1186 // Tests done.
1187#else
1188 LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
1189 // Force-print to std::cout so it's also outside the logcat.
1190 std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
1191#endif
1192}
1193
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001194
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001195TEST_F(StubTest, StringCompareTo) {
Ian Rogersc3ccc102014-06-25 11:52:14 -07001196#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001197 // TODO: Check the "Unresolved" allocation stubs
1198
1199 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -07001200
1201 const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);
1202
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001203 ScopedObjectAccess soa(self);
1204 // garbage is created during ClassLinker::Init
1205
1206 // Create some strings
1207 // Use array so we can index into it and use a matrix for expected results
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001208 // Setup: The first half is standard. The second half uses a non-zero offset.
1209 // TODO: Shared backing arrays.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001210 const char* c[] = { "", "", "a", "aa", "ab",
Serban Constantinescu86797a72014-06-19 16:17:56 +01001211 "aacaacaacaacaacaac", // This one's under the default limit to go to __memcmp16.
1212 "aacaacaacaacaacaacaacaacaacaacaacaac", // This one's over.
1213 "aacaacaacaacaacaacaacaacaacaacaacaaca" }; // As is this one. We need a separate one to
1214 // defeat object-equal optimizations.
Jeff Hao848f70a2014-01-15 13:49:50 -08001215 static constexpr size_t kStringCount = arraysize(c);
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001216
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001217 StackHandleScope<kStringCount> hs(self);
1218 Handle<mirror::String> s[kStringCount];
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001219
Jeff Hao848f70a2014-01-15 13:49:50 -08001220 for (size_t i = 0; i < kStringCount; ++i) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001221 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001222 }
1223
1224 // TODO: wide characters
1225
1226 // Matrix of expectations. First component is first parameter. Note we only check against the
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001227 // sign, not the value. As we are testing random offsets, we need to compute this and need to
1228 // rely on String::CompareTo being correct.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001229 int32_t expected[kStringCount][kStringCount];
1230 for (size_t x = 0; x < kStringCount; ++x) {
1231 for (size_t y = 0; y < kStringCount; ++y) {
1232 expected[x][y] = s[x]->CompareTo(s[y].Get());
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001233 }
1234 }
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001235
1236 // Play with it...
1237
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001238 for (size_t x = 0; x < kStringCount; ++x) {
1239 for (size_t y = 0; y < kStringCount; ++y) {
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001240 // Test string_compareto x y
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001241 size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
1242 reinterpret_cast<size_t>(s[y].Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001243 art_quick_string_compareto, self);
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001244
1245 EXPECT_FALSE(self->IsExceptionPending());
1246
1247 // The result is a 32b signed integer
1248 union {
1249 size_t r;
1250 int32_t i;
1251 } conv;
1252 conv.r = result;
1253 int32_t e = expected[x][y];
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001254 EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1255 conv.r;
1256 EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1257 conv.r;
1258 EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1259 conv.r;
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001260 }
1261 }
1262
Andreas Gampe7177d7c2014-05-02 12:10:02 -07001263 // TODO: Deallocate things.
1264
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001265 // Tests done.
1266#else
1267 LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
1268 // Force-print to std::cout so it's also outside the logcat.
1269 std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
1270 std::endl;
1271#endif
1272}
1273
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001274
Mathieu Chartierc7853442015-03-27 14:35:38 -07001275static void GetSetBooleanStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001276 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001277 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001278#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1279 (defined(__x86_64__) && !defined(__APPLE__))
Fred Shih37f05ef2014-07-16 18:38:08 -07001280 constexpr size_t num_values = 5;
1281 uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };
1282
1283 for (size_t i = 0; i < num_values; ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001284 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001285 static_cast<size_t>(values[i]),
1286 0U,
1287 StubTest::GetEntrypoint(self, kQuickSet8Static),
1288 self,
1289 referrer);
1290
Mathieu Chartierc7853442015-03-27 14:35:38 -07001291 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001292 0U, 0U,
1293 StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
1294 self,
1295 referrer);
1296 // Boolean currently stores bools as uint8_t, be more zealous about asserting correct writes/gets.
1297 EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
1298 }
1299#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001300 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001301 LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
1302 // Force-print to std::cout so it's also outside the logcat.
1303 std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1304#endif
1305}
// Round-trips signed byte values through a static field via the Set8Static /
// GetByteStatic quick entrypoints and checks each value read back.
// |f| is the field, |referrer| an arbitrary method used as the referrer, |test|
// provides the stub-invocation harness.
static void GetSetByteStatic(ArtField* f, Thread* self, ArtMethod* referrer,
                             StubTest* test)
    SHARED_REQUIRES(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Extremes and midpoints of the int8_t range.
  int8_t values[] = { -128, -64, 0, 64, 127 };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store the value through the quick setter stub...
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet8Static),
                              self,
                              referrer);

    // ...and read it back through the matching getter stub.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetByteStatic),
                                           self,
                                           referrer);
    EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1335
1336
// Exercises the Set8Instance/GetBooleanInstance quick entrypoints on a
// boolean instance field. Note that the stub writes through the raw 8-bit
// setter, so non-canonical "boolean" values such as 2, 128 and 0xFF are
// stored and read back as-is.
static void GetSetBooleanInstance(Handle<mirror::Object>* obj, ArtField* f, Thread* self,
                                  ArtMethod* referrer, StubTest* test)
    SHARED_REQUIRES(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  uint8_t values[] = { 0, true, 2, 128, 0xFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store values[i] into the field through the 8-bit instance setter stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet8Instance),
                              self,
                              referrer);

    // Validate the stub's store with a direct ArtField read.
    uint8_t res = f->GetBoolean(obj->Get());
    EXPECT_EQ(values[i], res) << "Iteration " << i;

    // Write the same value back via ArtField, then validate the stub's load
    // side through the GetBooleanInstance entrypoint.
    f->SetBoolean<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint8_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001372static void GetSetByteInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001373 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001374 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001375#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1376 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001377 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001378
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001379 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001380 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001381 reinterpret_cast<size_t>(obj->Get()),
1382 static_cast<size_t>(values[i]),
1383 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1384 self,
1385 referrer);
1386
Mathieu Chartierc7853442015-03-27 14:35:38 -07001387 int8_t res = f->GetByte(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001388 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001389 f->SetByte<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001390
Mathieu Chartierc7853442015-03-27 14:35:38 -07001391 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001392 reinterpret_cast<size_t>(obj->Get()),
1393 0U,
1394 StubTest::GetEntrypoint(self, kQuickGetByteInstance),
1395 self,
1396 referrer);
1397 EXPECT_EQ(res, static_cast<int8_t>(res2));
1398 }
1399#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001400 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001401 LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
1402 // Force-print to std::cout so it's also outside the logcat.
1403 std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1404#endif
1405}
1406
Mathieu Chartiere401d142015-04-22 13:56:20 -07001407static void GetSetCharStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001408 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001409 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001410#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1411 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001412 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001413
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001414 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001415 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001416 static_cast<size_t>(values[i]),
1417 0U,
1418 StubTest::GetEntrypoint(self, kQuickSet16Static),
1419 self,
1420 referrer);
1421
Mathieu Chartierc7853442015-03-27 14:35:38 -07001422 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001423 0U, 0U,
1424 StubTest::GetEntrypoint(self, kQuickGetCharStatic),
1425 self,
1426 referrer);
1427
1428 EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
1429 }
1430#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001431 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001432 LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
1433 // Force-print to std::cout so it's also outside the logcat.
1434 std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1435#endif
1436}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001437static void GetSetShortStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001438 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001439 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001440#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1441 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001442 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001443
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001444 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001445 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001446 static_cast<size_t>(values[i]),
1447 0U,
1448 StubTest::GetEntrypoint(self, kQuickSet16Static),
1449 self,
1450 referrer);
1451
Mathieu Chartierc7853442015-03-27 14:35:38 -07001452 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001453 0U, 0U,
1454 StubTest::GetEntrypoint(self, kQuickGetShortStatic),
1455 self,
1456 referrer);
1457
1458 EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
1459 }
1460#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001461 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001462 LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
1463 // Force-print to std::cout so it's also outside the logcat.
1464 std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1465#endif
1466}
1467
// Exercises the Set16Instance/GetCharInstance quick entrypoints on a char
// instance field: stub-store each value, cross-check with an ArtField read,
// then stub-load a modified value written via ArtField.
static void GetSetCharInstance(Handle<mirror::Object>* obj, ArtField* f,
                               Thread* self, ArtMethod* referrer, StubTest* test)
    SHARED_REQUIRES(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store values[i] through the 16-bit instance setter stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    // Validate the stub's store with a direct ArtField read.
    uint16_t res = f->GetChar(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    // Write a different value (res + 1) via ArtField ...
    f->SetChar<false>(obj->Get(), ++res);

    // ... then check the getter stub observes it.
    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetCharInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint16_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001502static void GetSetShortInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001503 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001504 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001505#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1506 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001507 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001508
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001509 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001510 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001511 reinterpret_cast<size_t>(obj->Get()),
1512 static_cast<size_t>(values[i]),
1513 StubTest::GetEntrypoint(self, kQuickSet16Instance),
1514 self,
1515 referrer);
1516
Mathieu Chartierc7853442015-03-27 14:35:38 -07001517 int16_t res = f->GetShort(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001518 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001519 f->SetShort<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001520
Mathieu Chartierc7853442015-03-27 14:35:38 -07001521 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001522 reinterpret_cast<size_t>(obj->Get()),
1523 0U,
1524 StubTest::GetEntrypoint(self, kQuickGetShortInstance),
1525 self,
1526 referrer);
1527 EXPECT_EQ(res, static_cast<int16_t>(res2));
1528 }
1529#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001530 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001531 LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
1532 // Force-print to std::cout so it's also outside the logcat.
1533 std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1534#endif
1535}
1536
// Exercises the Set32Static/Get32Static quick entrypoints on a static int
// field by round-tripping representative 32-bit values through the stubs.
static void GetSet32Static(ArtField* f, Thread* self, ArtMethod* referrer,
                           StubTest* test)
    SHARED_REQUIRES(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store via the 32-bit static setter stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet32Static),
                              self,
                              referrer);

    // Load via the 32-bit static getter stub.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet32Static),
                                           self,
                                           referrer);

#if defined(__mips__) && defined(__LP64__)
    // On MIPS64 the 64-bit result register apparently does not zero-extend
    // the 32-bit value, so narrow before comparing — NOTE(review): presumed
    // sign-extension of 32-bit results; confirm against the MIPS64 ABI.
    EXPECT_EQ(static_cast<uint32_t>(res), values[i]) << "Iteration " << i;
#else
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
#endif
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1571
1572
Mathieu Chartierc7853442015-03-27 14:35:38 -07001573static void GetSet32Instance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001574 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001575 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001576#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1577 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001578 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001579
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001580 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001581 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001582 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001583 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001584 StubTest::GetEntrypoint(self, kQuickSet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001585 self,
1586 referrer);
1587
Mathieu Chartierc7853442015-03-27 14:35:38 -07001588 int32_t res = f->GetInt(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001589 EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;
1590
1591 res++;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001592 f->SetInt<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001593
Mathieu Chartierc7853442015-03-27 14:35:38 -07001594 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001595 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001596 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001597 StubTest::GetEntrypoint(self, kQuickGet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001598 self,
1599 referrer);
1600 EXPECT_EQ(res, static_cast<int32_t>(res2));
1601 }
1602#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001603 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001604 LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
1605 // Force-print to std::cout so it's also outside the logcat.
1606 std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1607#endif
1608}
1609
1610
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))

// Helper: stores the reference 'val' into the static object field with dex
// index 'f_idx' via the SetObjStatic stub, then checks that the GetObjStatic
// stub returns the same reference.
static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
                                 ArtMethod* referrer, StubTest* test)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  // Store the reference through the object static setter stub.
  test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                            reinterpret_cast<size_t>(val),
                            0U,
                            StubTest::GetEntrypoint(self, kQuickSetObjStatic),
                            self,
                            referrer);

  // Read it back through the object static getter stub.
  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                                         0U, 0U,
                                         StubTest::GetEntrypoint(self, kQuickGetObjStatic),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
}
#endif
1633
// Tests the object-reference static field stubs with the sequence
// null -> live string -> null, so both installing and clearing a reference
// pass through the stub.
static void GetSetObjStatic(ArtField* f, Thread* self, ArtMethod* referrer,
                            StubTest* test)
    SHARED_REQUIRES(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_static(f->GetDexFieldIndex(), str, self, referrer, test);

  set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1653
1654
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))

// Helper: stores the reference 'val' into instance field 'f' of object 'trg'
// via the SetObjInstance stub, then checks the GetObjInstance stub and a
// direct ArtField read both return the same reference.
static void set_and_check_instance(ArtField* f, mirror::Object* trg,
                                   mirror::Object* val, Thread* self, ArtMethod* referrer,
                                   StubTest* test)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  // Store the reference through the object instance setter stub.
  test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                            reinterpret_cast<size_t>(trg),
                            reinterpret_cast<size_t>(val),
                            StubTest::GetEntrypoint(self, kQuickSetObjInstance),
                            self,
                            referrer);

  // Read it back through the object instance getter stub.
  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                         reinterpret_cast<size_t>(trg),
                                         0U,
                                         StubTest::GetEntrypoint(self, kQuickGetObjInstance),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;

  // Cross-check the stub's store via a direct ArtField read.
  EXPECT_EQ(val, f->GetObj(trg));
}
#endif
1680
// Tests the object-reference instance field stubs with the sequence
// null -> live string -> null, so both installing and clearing a reference
// pass through the stub.
static void GetSetObjInstance(Handle<mirror::Object>* obj, ArtField* f,
                              Thread* self, ArtMethod* referrer, StubTest* test)
    SHARED_REQUIRES(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_instance(f, obj->Get(), str, self, referrer, test);

  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1700
1701
Calin Juravle872ab3f2015-10-02 07:27:51 +01001702// TODO: Complete these tests for 32b architectures
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001703
// Exercises the Set64Static/Get64Static quick entrypoints on a static long
// field. Only enabled on 64-bit targets (size_t must hold a 64-bit value).
static void GetSet64Static(ArtField* f, Thread* self, ArtMethod* referrer,
                           StubTest* test)
    SHARED_REQUIRES(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) \
    || defined(__aarch64__)
  uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // 64 bit FieldSet stores the set value in the second register.
    // (Hence values[i] goes in the third Invoke3 slot and 0U in the second.)
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              0U,
                              values[i],
                              StubTest::GetEntrypoint(self, kQuickSet64Static),
                              self,
                              referrer);

    // Read back through the 64-bit static getter stub.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet64Static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1735
1736
// Exercises the Set64Instance/Get64Instance quick entrypoints on a long
// instance field: stub-store, verify via ArtField, modify via ArtField, then
// verify the stub load. Only enabled on 64-bit targets, so the size_t cast
// of the 64-bit value is lossless.
static void GetSet64Instance(Handle<mirror::Object>* obj, ArtField* f,
                             Thread* self, ArtMethod* referrer, StubTest* test)
    SHARED_REQUIRES(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) || \
    defined(__aarch64__)
  uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write through the 64-bit instance setter stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet64Instance),
                              self,
                              referrer);

    // Validate the stub's store with a direct ArtField read.
    int64_t res = f->GetLong(obj->Get());
    EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;

    // Change the field via ArtField ...
    res++;
    f->SetLong<false>(obj->Get(), res);

    // ... and check the getter stub observes the new value.
    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGet64Instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int64_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1773
// Drives the per-type GetSet* stub tests above over every static and
// instance field of the Java test class "AllFields" whose primitive type
// matches 'test_type'.
static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
  // garbage is created during ClassLinker::Init

  // Create an AllFields instance through JNI so both static and instance
  // fields are exercisable.
  JNIEnv* env = Thread::Current()->GetJniEnv();
  jclass jc = env->FindClass("AllFields");
  CHECK(jc != nullptr);
  jobject o = env->AllocObject(jc);
  CHECK(o != nullptr);

  ScopedObjectAccess soa(self);
  StackHandleScope<3> hs(self);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
  Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
  // Need a method as a referrer
  ArtMethod* m = c->GetDirectMethod(0, sizeof(void*));

  // Play with it...

  // Static fields.
  for (ArtField& f : c->GetSFields()) {
    Primitive::Type type = f.GetTypeAsPrimitiveType();
    // Only run the test variant selected by the caller.
    if (test_type != type) {
      continue;
    }
    switch (type) {
      case Primitive::Type::kPrimBoolean:
        GetSetBooleanStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimByte:
        GetSetByteStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimChar:
        GetSetCharStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimShort:
        GetSetShortStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimInt:
        GetSet32Static(&f, self, m, test);
        break;
      case Primitive::Type::kPrimLong:
        GetSet64Static(&f, self, m, test);
        break;
      case Primitive::Type::kPrimNot:
        // Don't try array.
        if (f.GetTypeDescriptor()[0] != '[') {
          GetSetObjStatic(&f, self, m, test);
        }
        break;
      default:
        break;  // Skip.
    }
  }

  // Instance fields.
  for (ArtField& f : c->GetIFields()) {
    Primitive::Type type = f.GetTypeAsPrimitiveType();
    // Only run the test variant selected by the caller.
    if (test_type != type) {
      continue;
    }
    switch (type) {
      case Primitive::Type::kPrimBoolean:
        GetSetBooleanInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimByte:
        GetSetByteInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimChar:
        GetSetCharInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimShort:
        GetSetShortInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimInt:
        GetSet32Instance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimLong:
        GetSet64Instance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimNot:
        // Don't try array.
        if (f.GetTypeDescriptor()[0] != '[') {
          GetSetObjInstance(&obj, &f, self, m, test);
        }
        break;
      default:
        break;  // Skip.
    }
  }

  // TODO: Deallocate things.
}
1866
Fred Shih37f05ef2014-07-16 18:38:08 -07001867TEST_F(StubTest, Fields8) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001868 Thread* self = Thread::Current();
1869
1870 self->TransitionFromSuspendedToRunnable();
1871 LoadDex("AllFields");
1872 bool started = runtime_->Start();
1873 CHECK(started);
1874
1875 TestFields(self, this, Primitive::Type::kPrimBoolean);
1876 TestFields(self, this, Primitive::Type::kPrimByte);
1877}
1878
1879TEST_F(StubTest, Fields16) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001880 Thread* self = Thread::Current();
1881
1882 self->TransitionFromSuspendedToRunnable();
1883 LoadDex("AllFields");
1884 bool started = runtime_->Start();
1885 CHECK(started);
1886
1887 TestFields(self, this, Primitive::Type::kPrimChar);
1888 TestFields(self, this, Primitive::Type::kPrimShort);
1889}
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001890
1891TEST_F(StubTest, Fields32) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001892 Thread* self = Thread::Current();
1893
1894 self->TransitionFromSuspendedToRunnable();
1895 LoadDex("AllFields");
1896 bool started = runtime_->Start();
1897 CHECK(started);
1898
1899 TestFields(self, this, Primitive::Type::kPrimInt);
1900}
1901
1902TEST_F(StubTest, FieldsObj) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001903 Thread* self = Thread::Current();
1904
1905 self->TransitionFromSuspendedToRunnable();
1906 LoadDex("AllFields");
1907 bool started = runtime_->Start();
1908 CHECK(started);
1909
1910 TestFields(self, this, Primitive::Type::kPrimNot);
1911}
1912
1913TEST_F(StubTest, Fields64) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001914 Thread* self = Thread::Current();
1915
1916 self->TransitionFromSuspendedToRunnable();
1917 LoadDex("AllFields");
1918 bool started = runtime_->Start();
1919 CHECK(started);
1920
1921 TestFields(self, this, Primitive::Type::kPrimLong);
1922}
1923
// Exercises interface dispatch through the quick stubs: first the IMT conflict
// trampoline (with the interface method index passed as the hidden argument),
// then the access-checking interface invocation trampoline. Both are driven by
// calling List.contains() on a java.util.ArrayList before and after adding an
// element, so both JNI_FALSE and JNI_TRUE results are observed.
TEST_F(StubTest, IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod = env->GetMethodID(
      arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get representation.
  ArtMethod* contains_amethod = soa.DecodeMethod(contains_jmethod);

  // Patch up ArrayList.contains: if it has no compiled code yet, point its quick
  // entrypoint at the interpreter bridge so the trampolines have somewhere to go.
  if (contains_amethod->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
  }

  // List

  // Load List and used methods (JNI). The interface method is what gets resolved
  // through the IMT.
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod = env->GetMethodID(
      list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get mirror representation.
  ArtMethod* inf_contains = soa.DecodeMethod(inf_contains_jmethod);

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Create instances: the receiver ArrayList and a plain Object to probe for.

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object*>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(jobj)));

  // Invocation tests.

  // 1. imt_conflict

  // Contains. The list is still empty, so the call must yield JNI_FALSE.
  // NOTE(review): arg0 is 0U here — presumably unused on the conflict-trampoline
  // path, with the interface method's dex index carried in the hidden argument;
  // confirm against the trampoline's calling convention.

  size_t result =
      Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self, contains_amethod,
                                   static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());

  // Contains. Now that the element was added, the same call must yield JNI_TRUE.

  result = Invoke3WithReferrerAndHidden(
      0U, reinterpret_cast<size_t>(array_list.Get()), reinterpret_cast<size_t>(obj.Get()),
      StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline), self, contains_amethod,
      static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // 2. regular interface trampoline

  // Method index goes in arg0 for this entrypoint; receiver in arg1.
  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(obj.Get()),
                               StubTest::GetEntrypoint(self,
                                   kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // Probe for the list itself, which was never added: expect JNI_FALSE.
  result = Invoke3WithReferrer(
      static_cast<size_t>(inf_contains->GetDexMethodIndex()),
      reinterpret_cast<size_t>(array_list.Get()), reinterpret_cast<size_t>(array_list.Get()),
      StubTest::GetEntrypoint(self, kQuickInvokeInterfaceTrampolineWithAccessCheck), self,
      contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
2043
Andreas Gampe6aac3552014-06-09 14:55:53 -07002044TEST_F(StubTest, StringIndexOf) {
2045#if defined(__arm__) || defined(__aarch64__)
2046 Thread* self = Thread::Current();
2047 ScopedObjectAccess soa(self);
2048 // garbage is created during ClassLinker::Init
2049
2050 // Create some strings
2051 // Use array so we can index into it and use a matrix for expected results
2052 // Setup: The first half is standard. The second half uses a non-zero offset.
2053 // TODO: Shared backing arrays.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002054 const char* c_str[] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
2055 static constexpr size_t kStringCount = arraysize(c_str);
2056 const char c_char[] = { 'a', 'b', 'c', 'd', 'e' };
2057 static constexpr size_t kCharCount = arraysize(c_char);
Andreas Gampe6aac3552014-06-09 14:55:53 -07002058
2059 StackHandleScope<kStringCount> hs(self);
2060 Handle<mirror::String> s[kStringCount];
2061
2062 for (size_t i = 0; i < kStringCount; ++i) {
2063 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
2064 }
2065
2066 // Matrix of expectations. First component is first parameter. Note we only check against the
2067 // sign, not the value. As we are testing random offsets, we need to compute this and need to
2068 // rely on String::CompareTo being correct.
2069 static constexpr size_t kMaxLen = 9;
2070 DCHECK_LE(strlen(c_str[kStringCount-1]), kMaxLen) << "Please fix the indexof test.";
2071
2072 // Last dimension: start, offset by 1.
2073 int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
2074 for (size_t x = 0; x < kStringCount; ++x) {
2075 for (size_t y = 0; y < kCharCount; ++y) {
2076 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2077 expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
2078 }
2079 }
2080 }
2081
2082 // Play with it...
2083
2084 for (size_t x = 0; x < kStringCount; ++x) {
2085 for (size_t y = 0; y < kCharCount; ++y) {
2086 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2087 int32_t start = static_cast<int32_t>(z) - 1;
2088
2089 // Test string_compareto x y
2090 size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
Andreas Gampe29b38412014-08-13 00:15:43 -07002091 StubTest::GetEntrypoint(self, kQuickIndexOf), self);
Andreas Gampe6aac3552014-06-09 14:55:53 -07002092
2093 EXPECT_FALSE(self->IsExceptionPending());
2094
2095 // The result is a 32b signed integer
2096 union {
2097 size_t r;
2098 int32_t i;
2099 } conv;
2100 conv.r = result;
2101
2102 EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
2103 c_char[y] << " @ " << start;
2104 }
2105 }
2106 }
2107
2108 // TODO: Deallocate things.
2109
2110 // Tests done.
2111#else
2112 LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
2113 // Force-print to std::cout so it's also outside the logcat.
2114 std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe51f76352014-05-21 08:28:48 -07002115#endif
2116}
2117
Man Cao1aee9002015-07-14 22:31:42 -07002118TEST_F(StubTest, ReadBarrier) {
2119#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
2120 defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
2121 Thread* self = Thread::Current();
2122
2123 const uintptr_t readBarrierSlow = StubTest::GetEntrypoint(self, kQuickReadBarrierSlow);
2124
2125 // Create an object
2126 ScopedObjectAccess soa(self);
2127 // garbage is created during ClassLinker::Init
2128
2129 StackHandleScope<2> hs(soa.Self());
2130 Handle<mirror::Class> c(
2131 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
2132
2133 // Build an object instance
2134 Handle<mirror::Object> obj(hs.NewHandle(c->AllocObject(soa.Self())));
2135
2136 EXPECT_FALSE(self->IsExceptionPending());
2137
2138 size_t result = Invoke3(0U, reinterpret_cast<size_t>(obj.Get()),
2139 mirror::Object::ClassOffset().SizeValue(), readBarrierSlow, self);
2140
2141 EXPECT_FALSE(self->IsExceptionPending());
2142 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
2143 mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
2144 EXPECT_EQ(klass, obj->GetClass());
2145
2146 // Tests done.
2147#else
2148 LOG(INFO) << "Skipping read_barrier_slow";
2149 // Force-print to std::cout so it's also outside the logcat.
2150 std::cout << "Skipping read_barrier_slow" << std::endl;
2151#endif
2152}
2153
Roland Levillain0d5a2812015-11-13 10:07:31 +00002154TEST_F(StubTest, ReadBarrierForRoot) {
2155#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
2156 defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
2157 Thread* self = Thread::Current();
2158
2159 const uintptr_t readBarrierForRootSlow =
2160 StubTest::GetEntrypoint(self, kQuickReadBarrierForRootSlow);
2161
2162 // Create an object
2163 ScopedObjectAccess soa(self);
2164 // garbage is created during ClassLinker::Init
2165
2166 StackHandleScope<1> hs(soa.Self());
2167
2168 Handle<mirror::String> obj(
2169 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
2170
2171 EXPECT_FALSE(self->IsExceptionPending());
2172
2173 GcRoot<mirror::Class>& root = mirror::String::java_lang_String_;
2174 size_t result = Invoke3(reinterpret_cast<size_t>(&root), 0U, 0U, readBarrierForRootSlow, self);
2175
2176 EXPECT_FALSE(self->IsExceptionPending());
2177 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
2178 mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
2179 EXPECT_EQ(klass, obj->GetClass());
2180
2181 // Tests done.
2182#else
2183 LOG(INFO) << "Skipping read_barrier_for_root_slow";
2184 // Force-print to std::cout so it's also outside the logcat.
2185 std::cout << "Skipping read_barrier_for_root_slow" << std::endl;
2186#endif
2187}
2188
Andreas Gampe525cde22014-04-22 15:44:50 -07002189} // namespace art