/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
Ian Rogerse63db272014-07-15 15:36:11 -070017#include <cstdio>
18
Mathieu Chartierc7853442015-03-27 14:35:38 -070019#include "art_field-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070020#include "art_method-inl.h"
Vladimir Marko3481ba22015-04-13 12:22:36 +010021#include "class_linker-inl.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070022#include "common_runtime_test.h"
Andreas Gampe29b38412014-08-13 00:15:43 -070023#include "entrypoints/quick/quick_entrypoints_enum.h"
Andreas Gampe51f76352014-05-21 08:28:48 -070024#include "mirror/class-inl.h"
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -070025#include "mirror/string-inl.h"
Ian Rogerse63db272014-07-15 15:36:11 -070026#include "scoped_thread_state_change.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070027
28namespace art {
29
30
// Test fixture for invoking ART quick-entrypoint stubs directly. It sets up a
// runtime with callee-save methods, then provides Invoke3* helpers that use
// per-architecture inline assembly to place arguments in the registers the
// quick ABI expects and to call a stub's code pointer.
class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      // Ensure every callee-save type has a method installed; some may
      // already exist from the normal runtime setup.
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(), type);
        }
      }
    }
  }

  // Shrink the heap and force interpreter mode before the runtime starts.
  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  // Invoke a stub with three word-sized arguments and no referrer method.
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // Invoke a stub with three arguments, pushing |referrer| where the stub's
  // quick frame expects the calling method. A ManagedStack fragment is pushed
  // so the transition into "managed" code is visible to stack walks.
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             ArtMethod* referrer) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "subl $12, %%esp\n\t"       // Align stack.
        "pushl %[referrer]\n\t"     // Store referrer.
        "call *%%edi\n\t"           // Call the stub
        "addl $16, %%esp"           // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #20\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"     // self goes into r9 (ART thread register).
        "add sp, sp, #20\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop null and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : "r0", "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"    // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "str %[self], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        // with a known pattern so we can verify below that the callee-saved
        // FP registers survive the stub call.
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldr x19, [sp, #32]\n\t"       // self goes into x19.
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"


        "blr x3\n\t"              // Call the stub
        "mov x8, x0\n\t"          // Store result
        "add sp, sp, #16\n\t"     // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"          // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"    // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"     // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t"  // Store the FPR comparison result
        "mov %[result], x8\n\t"      // Store the call result

        "b 3f\n\t"                // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__mips__) && !defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a3 and t0-t7 which we say we don't clobber. May contain args.
        "addiu $sp, $sp, -64\n\t"
        "sw $a0, 0($sp)\n\t"
        "sw $a1, 4($sp)\n\t"
        "sw $a2, 8($sp)\n\t"
        "sw $a3, 12($sp)\n\t"
        "sw $t0, 16($sp)\n\t"
        "sw $t1, 20($sp)\n\t"
        "sw $t2, 24($sp)\n\t"
        "sw $t3, 28($sp)\n\t"
        "sw $t4, 32($sp)\n\t"
        "sw $t5, 36($sp)\n\t"
        "sw $t6, 40($sp)\n\t"
        "sw $t7, 44($sp)\n\t"
        // Spill gp register since it is caller save.
        "sw $gp, 52($sp)\n\t"

        "addiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sw %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "addiu $sp, $sp, -20\n\t"
        "sw %[arg0], 0($sp)\n\t"
        "sw %[arg1], 4($sp)\n\t"
        "sw %[arg2], 8($sp)\n\t"
        "sw %[code], 12($sp)\n\t"
        "sw %[self], 16($sp)\n\t"

        // Load call params into the right registers.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $t9, 12($sp)\n\t"      // code goes into t9 (MIPS call register).
        "lw $s1, 16($sp)\n\t"      // self goes into s1.
        "addiu $sp, $sp, 20\n\t"

        "jalr $t9\n\t"             // Call the stub.
        "nop\n\t"                  // Branch delay slot.
        "addiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $a3, 12($sp)\n\t"
        "lw $t0, 16($sp)\n\t"
        "lw $t1, 20($sp)\n\t"
        "lw $t2, 24($sp)\n\t"
        "lw $t3, 28($sp)\n\t"
        "lw $t4, 32($sp)\n\t"
        "lw $t5, 36($sp)\n\t"
        "lw $t6, 40($sp)\n\t"
        "lw $t7, 44($sp)\n\t"
        // Restore gp.
        "lw $gp, 52($sp)\n\t"
        "addiu $sp, $sp, 64\n\t"   // Free stack space, now sp as on entry.

        "move %[result], $v0\n\t"  // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : "at", "v0", "v1", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "t8", "t9", "k0", "k1",
          "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__mips__) && defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a7 which we say we don't clobber. May contain args.
        "daddiu $sp, $sp, -64\n\t"
        "sd $a0, 0($sp)\n\t"
        "sd $a1, 8($sp)\n\t"
        "sd $a2, 16($sp)\n\t"
        "sd $a3, 24($sp)\n\t"
        "sd $a4, 32($sp)\n\t"
        "sd $a5, 40($sp)\n\t"
        "sd $a6, 48($sp)\n\t"
        "sd $a7, 56($sp)\n\t"

        "daddiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sd %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "daddiu $sp, $sp, -40\n\t"
        "sd %[arg0], 0($sp)\n\t"
        "sd %[arg1], 8($sp)\n\t"
        "sd %[arg2], 16($sp)\n\t"
        "sd %[code], 24($sp)\n\t"
        "sd %[self], 32($sp)\n\t"

        // Load call params into the right registers.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $t9, 24($sp)\n\t"       // code goes into t9 (MIPS call register).
        "ld $s1, 32($sp)\n\t"       // self goes into s1.
        "daddiu $sp, $sp, 40\n\t"

        "jalr $t9\n\t"              // Call the stub.
        "nop\n\t"                   // Branch delay slot.
        "daddiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $a3, 24($sp)\n\t"
        "ld $a4, 32($sp)\n\t"
        "ld $a5, 40($sp)\n\t"
        "ld $a6, 48($sp)\n\t"
        "ld $a7, 56($sp)\n\t"
        "daddiu $sp, $sp, 64\n\t"

        "move %[result], $v0\n\t"   // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : "at", "v0", "v1", "t0", "t1", "t2", "t3", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
          "t8", "t9", "k0", "k1", "fp", "ra",
          "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7", "f8", "f9", "f10", "f11", "f12", "f13",
          "f14", "f15", "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23", "f24", "f25", "f26",
          "f27", "f28", "f29", "f30", "f31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__) && defined(__clang__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"              // Call the stub
        "addq $16, %%rsp\n\t"          // Pop null and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "c"(referrer)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    UNUSED(arg0, arg1, arg2, code, referrer);
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    // Publish the FPR-preservation flag (only ever set non-zero on aarch64)
    // and fail the test immediately if the callee-saved FPRs were clobbered.
    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Like Invoke3WithReferrer, but additionally passes |hidden| in the
  // architecture's hidden-argument register (r12 on arm, x17 on arm64,
  // $t0 on mips/mips64, rax on x86-64, xmm7 on x86).
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "movd %[hidden], %%xmm7\n\t"  // Pass hidden argument in xmm7.
        "subl $12, %%esp\n\t"       // Align stack.
        "pushl %[referrer]\n\t"     // Store referrer
        "call *%%edi\n\t"           // Call the stub
        "addl $16, %%esp"           // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer), [hidden]"m"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"     // self goes into r9 (ART thread register).
        "ldr r12, [sp, #20]\n\t"    // hidden argument goes into r12.
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop null and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "r0", "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"    // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        // with a known pattern so we can verify below that the callee-saved
        // FP registers survive the stub call.
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x19, x17, [sp, #32]\n\t"  // self into x19, hidden into x17.
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"              // Call the stub
        "mov x8, x0\n\t"          // Store result
        "add sp, sp, #16\n\t"     // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"          // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"    // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"     // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t"  // Store the FPR comparison result
        "mov %[result], x8\n\t"      // Store the call result

        "b 3f\n\t"                // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__mips__) && !defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a3 and t0-t7 which we say we don't clobber. May contain args.
        "addiu $sp, $sp, -64\n\t"
        "sw $a0, 0($sp)\n\t"
        "sw $a1, 4($sp)\n\t"
        "sw $a2, 8($sp)\n\t"
        "sw $a3, 12($sp)\n\t"
        "sw $t0, 16($sp)\n\t"
        "sw $t1, 20($sp)\n\t"
        "sw $t2, 24($sp)\n\t"
        "sw $t3, 28($sp)\n\t"
        "sw $t4, 32($sp)\n\t"
        "sw $t5, 36($sp)\n\t"
        "sw $t6, 40($sp)\n\t"
        "sw $t7, 44($sp)\n\t"
        // Spill gp register since it is caller save.
        "sw $gp, 52($sp)\n\t"

        "addiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sw %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "addiu $sp, $sp, -24\n\t"
        "sw %[arg0], 0($sp)\n\t"
        "sw %[arg1], 4($sp)\n\t"
        "sw %[arg2], 8($sp)\n\t"
        "sw %[code], 12($sp)\n\t"
        "sw %[self], 16($sp)\n\t"
        "sw %[hidden], 20($sp)\n\t"

        // Load call params into the right registers.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $t9, 12($sp)\n\t"      // code goes into t9 (MIPS call register).
        "lw $s1, 16($sp)\n\t"      // self goes into s1.
        "lw $t0, 20($sp)\n\t"      // hidden argument goes into t0.
        "addiu $sp, $sp, 24\n\t"

        "jalr $t9\n\t"             // Call the stub.
        "nop\n\t"                  // Branch delay slot.
        "addiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $a3, 12($sp)\n\t"
        "lw $t0, 16($sp)\n\t"
        "lw $t1, 20($sp)\n\t"
        "lw $t2, 24($sp)\n\t"
        "lw $t3, 28($sp)\n\t"
        "lw $t4, 32($sp)\n\t"
        "lw $t5, 36($sp)\n\t"
        "lw $t6, 40($sp)\n\t"
        "lw $t7, 44($sp)\n\t"
        // Restore gp.
        "lw $gp, 52($sp)\n\t"
        "addiu $sp, $sp, 64\n\t"   // Free stack space, now sp as on entry.

        "move %[result], $v0\n\t"  // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "at", "v0", "v1", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "t8", "t9", "k0", "k1",
          "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__mips__) && defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a7 which we say we don't clobber. May contain args.
        "daddiu $sp, $sp, -64\n\t"
        "sd $a0, 0($sp)\n\t"
        "sd $a1, 8($sp)\n\t"
        "sd $a2, 16($sp)\n\t"
        "sd $a3, 24($sp)\n\t"
        "sd $a4, 32($sp)\n\t"
        "sd $a5, 40($sp)\n\t"
        "sd $a6, 48($sp)\n\t"
        "sd $a7, 56($sp)\n\t"

        "daddiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sd %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "daddiu $sp, $sp, -48\n\t"
        "sd %[arg0], 0($sp)\n\t"
        "sd %[arg1], 8($sp)\n\t"
        "sd %[arg2], 16($sp)\n\t"
        "sd %[code], 24($sp)\n\t"
        "sd %[self], 32($sp)\n\t"
        "sd %[hidden], 40($sp)\n\t"

        // Load call params into the right registers.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $t9, 24($sp)\n\t"       // code goes into t9 (MIPS call register).
        "ld $s1, 32($sp)\n\t"       // self goes into s1.
        "ld $t0, 40($sp)\n\t"       // hidden argument goes into t0.
        "daddiu $sp, $sp, 48\n\t"

        "jalr $t9\n\t"              // Call the stub.
        "nop\n\t"                   // Branch delay slot.
        "daddiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $a3, 24($sp)\n\t"
        "ld $a4, 32($sp)\n\t"
        "ld $a5, 40($sp)\n\t"
        "ld $a6, 48($sp)\n\t"
        "ld $a7, 56($sp)\n\t"
        "daddiu $sp, $sp, 64\n\t"

        "move %[result], $v0\n\t"   // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "at", "v0", "v1", "t0", "t1", "t2", "t3", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
          "t8", "t9", "k0", "k1", "fp", "ra",
          "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7", "f8", "f9", "f10", "f11", "f12", "f13",
          "f14", "f15", "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23", "f24", "f25", "f26",
          "f27", "f28", "f29", "f30", "f31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__) && defined(__clang__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rbx\n\t"              // Call the stub
        "addq $16, %%rsp\n\t"          // Pop null and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "b"(code), [referrer] "c"(referrer), [hidden] "a"(hidden)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    UNUSED(arg0, arg1, arg2, code, referrer, hidden);
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    // Publish the FPR-preservation flag (only ever set non-zero on aarch64)
    // and fail the test immediately if the callee-saved FPRs were clobbered.
    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Read the current code pointer for |entrypoint| out of |self|'s
  // entrypoint table, using the 4-/8-byte pointer-size offset as appropriate.
  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
    int32_t offset;
#ifdef __LP64__
    offset = GetThreadOffset<8>(entrypoint).Int32Value();
#else
    offset = GetThreadOffset<4>(entrypoint).Int32Value();
#endif
    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
  }

 protected:
  // Result of the aarch64 callee-saved FPR (d8-d15) preservation check from
  // the last Invoke3* call; 0 means the registers survived the stub call.
  size_t fp_result;
};
790
791
Andreas Gampe525cde22014-04-22 15:44:50 -0700792TEST_F(StubTest, Memcpy) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200793#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__)) || defined(__mips__)
Andreas Gampe525cde22014-04-22 15:44:50 -0700794 Thread* self = Thread::Current();
795
796 uint32_t orig[20];
797 uint32_t trg[20];
798 for (size_t i = 0; i < 20; ++i) {
799 orig[i] = i;
800 trg[i] = 0;
801 }
802
803 Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
Andreas Gampe29b38412014-08-13 00:15:43 -0700804 10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700805
806 EXPECT_EQ(orig[0], trg[0]);
807
808 for (size_t i = 1; i < 4; ++i) {
809 EXPECT_NE(orig[i], trg[i]);
810 }
811
812 for (size_t i = 4; i < 14; ++i) {
813 EXPECT_EQ(orig[i], trg[i]);
814 }
815
816 for (size_t i = 14; i < 20; ++i) {
817 EXPECT_NE(orig[i], trg[i]);
818 }
819
820 // TODO: Test overlapping?
821
822#else
823 LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
824 // Force-print to std::cout so it's also outside the logcat.
825 std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
826#endif
827}
828
// Exercises the quick lock-object stub: a first lock must produce a thin lock
// with recursion count zero, repeated locking must bump the count, and an
// object whose lock word already holds an identity hash must inflate to a fat
// (monitor-backed) lock when locked.
TEST_F(StubTest, LockObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Number of recursive lock acquisitions to drive through the stub.
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();

  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  // A freshly allocated object must start out unlocked.
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  // First lock through the stub: unlocked -> thin-locked.
  Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
  EXPECT_EQ(lock_after.ThinLockCount(), 0U);  // Thin lock starts count at zero

  // Each recursive lock increments the thin-lock count; count i after the
  // (i+1)-th acquisition, since the count starts at zero.
  for (size_t i = 1; i < kThinLockLoops; ++i) {
    Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

    // Check we're at lock count i

    LockWord l_inc = obj->GetLockWord(false);
    LockWord::LockState l_inc_state = l_inc.GetState();
    EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
    EXPECT_EQ(l_inc.ThinLockCount(), i);
  }

  // Force a fat lock by running identity hashcode to fill up lock word.
  Handle<mirror::String> obj2(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  // Storing the identity hash occupies the lock word, so the next lock cannot
  // be thin and must inflate to a monitor.
  obj2->IdentityHashCode();

  Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after2 = obj2->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
  EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));

  // Test done.
#else
  LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
887
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700888
// Tiny deterministic pseudo-random generator (Lehmer-style multiplier with an
// additive offset) used to drive the lock/unlock stress test reproducibly.
// Arithmetic deliberately wraps in uint32_t.
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  // Advances the state and returns the new value.
  uint32_t next() {
    const uint32_t updated = val_ * 48271 % 2147483647 + 13;
    val_ = updated;
    return updated;
  }

  uint32_t val_;  // Current state; also the last value returned.
};
900
901
// Exercises the quick unlock-object stub, then stress-tests lock/unlock pairs:
// unlocking an unlocked object must raise an illegal-monitor-state exception;
// a lock/unlock round trip must return to kUnlocked; and a randomized sequence
// over several objects (some force-inflated to fat locks) must keep the lock
// word / monitor entry count consistent with a shadow count at every step.
// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Maximum recursion depth per lock before the stress loop forces an unlock.
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();

  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
  const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init
  static constexpr size_t kNumberOfLocks = 10;  // Number of objects = lock
  StackHandleScope<kNumberOfLocks + 1> hs(self);
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  // Unlocking an object that was never locked must fail with an exception.
  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
  // This should be an illegal monitor state.
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // The failed unlock must leave the lock word untouched.
  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);

  // A single lock/unlock round trip: unlocked -> thin-locked -> unlocked.
  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after2 = obj->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);

  LockWord lock_after3 = obj->GetLockWord(false);
  LockWord::LockState new_state3 = lock_after3.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);

  // Stress test:
  // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
  // each step.

  RandGen r(0x1234);  // Fixed seed keeps the test reproducible.

  constexpr size_t kIterations = 10000;  // Number of iterations
  constexpr size_t kMoveToFat = 1000;  // Chance of 1:kMoveFat to make a lock fat.

  size_t counts[kNumberOfLocks];  // Shadow recursion count per object.
  bool fat[kNumberOfLocks];  // Whether a lock should be thin or fat.
  Handle<mirror::String> objects[kNumberOfLocks];

  // Initialize = allocate.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    counts[i] = 0;
    fat[i] = false;
    objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
  }

  for (size_t i = 0; i < kIterations; ++i) {
    // Select which lock to update.
    size_t index = r.next() % kNumberOfLocks;

    // Make lock fat?
    if (!fat[index] && (r.next() % kMoveToFat == 0)) {
      fat[index] = true;
      // Computing the identity hash occupies the lock word, forcing inflation.
      objects[index]->IdentityHashCode();

      LockWord lock_iter = objects[index]->GetLockWord(false);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (counts[index] == 0) {
        // Not currently held: the lock word just stores the hash.
        EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
      } else {
        // Held: hash + lock cannot coexist in a thin word, so it inflated.
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
      }
    } else {
      bool take_lock;  // Whether to lock or unlock in this step.
      if (counts[index] == 0) {
        take_lock = true;  // Cannot unlock below zero.
      } else if (counts[index] == kThinLockLoops) {
        take_lock = false;  // Cap the recursion depth.
      } else {
        // Randomly.
        take_lock = r.next() % 2 == 0;
      }

      if (take_lock) {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
                      self);
        counts[index]++;
      } else {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      art_quick_unlock_object, self);
        counts[index]--;
      }

      EXPECT_FALSE(self->IsExceptionPending());

      // Check the new state.
      LockWord lock_iter = objects[index]->GetLockWord(true);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (fat[index]) {
        // Abuse MonitorInfo.
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
        MonitorInfo info(objects[index].Get());
        EXPECT_EQ(counts[index], info.entry_count_) << index;
      } else {
        if (counts[index] > 0) {
          EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
          // Thin lock count is the recursion depth minus one (first lock is 0).
          EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
        } else {
          EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
        }
      }
    }
  }

  // Unlock the remaining count times and then check it's unlocked. Then deallocate.
  // Go reverse order to correctly handle Handles.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    size_t index = kNumberOfLocks - 1 - i;
    size_t count = counts[index];
    while (count > 0) {
      test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
                    self);
      count--;
    }

    // Fat locks stay fat after release, so both terminal states are legal.
    LockWord lock_after4 = objects[index]->GetLockWord(false);
    LockWord::LockState new_state4 = lock_after4.GetState();
    EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
                || LockWord::LockState::kFatLocked == new_state4);
  }

  // Test done.
#else
  UNUSED(test);
  LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1047
TEST_F(StubTest, UnlockObject) {
  // Thin wrapper: the actual work lives in TestUnlockObject() because it needs
  // NO_THREAD_SAFETY_ANALYSIS, which cannot be applied to a TEST_F body.
  // This will lead to monitor error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  TestUnlockObject(this);
}
Andreas Gampe525cde22014-04-22 15:44:50 -07001054
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001055#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1056 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -07001057extern "C" void art_quick_check_cast(void);
1058#endif
1059
// Exercises the quick check-cast stub with two array classes: casts that
// should succeed (same class, and String[] where Object[] is wanted) leave no
// exception; the impossible direction (Object[] where String[] is wanted)
// must throw.
TEST_F(StubTest, CheckCast) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  const uintptr_t art_quick_check_cast = StubTest::GetEntrypoint(self, kQuickCheckCast);

  // Find some classes.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  // c = Object[] (the wider type), c2 = String[] (the narrower type).
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
  Handle<mirror::Class> c2(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  EXPECT_FALSE(self->IsExceptionPending());

  // Identical classes: trivially castable.
  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  // Widening direction (String[] is assignable to Object[]): no exception.
  // NOTE(review): per these pass/fail pairs the stub appears to check that the
  // arg1 class is assignable to the arg0 class — confirm against the stub's
  // calling convention.
  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  // TODO: Make the following work. But that would require correct managed frames.

  // Narrowing direction (Object[] is not a String[]): must throw.
  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

#else
  LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1108
1109
// Exercises the checked aput-object stub (null + bounds + assignability
// checks): storing a String or null into a String[] succeeds; out-of-bounds
// indices and storing a plain Object into a String[] must throw.
TEST_F(StubTest, APutObj) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  // Do not check non-checked ones, we'd need handlers and stuff...
  const uintptr_t art_quick_aput_obj_with_null_and_bound_check =
      StubTest::GetEntrypoint(self, kQuickAputObjectWithNullAndBoundCheck);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<5> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
  Handle<mirror::Class> ca(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  // Build a string array of size 10 (indices 0..9 valid; 10 is the first OOB index).
  Handle<mirror::ObjectArray<mirror::Object>> array(
      hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));

  // Build a string -> should be assignable
  Handle<mirror::String> str_obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  // Build a generic object -> should fail assigning
  Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));

  // Play with it...

  // 1) Success cases
  // 1.1) Assign str_obj to array[0..3]

  EXPECT_FALSE(self->IsExceptionPending());

  // Stub arguments: (array, index, value).
  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(3));

  // 1.2) Assign null to array[0..3]
  // null stores skip the assignability check and must always succeed in range.

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(3));

  // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.

  // 2) Failure cases (str into str[])
  // 2.1) Array = null
  // TODO: Throwing NPE needs actual DEX code

//  Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
//          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
//
//  EXPECT_TRUE(self->IsExceptionPending());
//  self->ClearException();

  // 2.2) Index < 0
  // -1 becomes a huge unsigned index; the bounds check must reject it.

  Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
          reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 2.3) Index > 0
  // Index 10 is one past the end of the 10-element array.

  Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 3) Failure cases (obj into str[])
  // A plain Object is not assignable to a String[] element.

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // Tests done.
#else
  LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1241
// Exercises the three object-allocation stubs (AllocObject, AllocObjectResolved,
// AllocObjectInitialized): each must return a valid instance of the requested
// class. Then fills the heap to force an OutOfMemoryError from the stub.
TEST_F(StubTest, AllocObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // Use an arbitrary method from c to use as referrer
    // The unresolved-path stub takes (type_idx, referrer_method, ...).
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            // arbitrary
                            reinterpret_cast<size_t>(c->GetVirtualMethod(0, sizeof(void*))),
                            0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObject),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    // Handles keep the filler arrays alive so the GC cannot reclaim them.
    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K
    size_t length = 128 * KB / 4;
    while (length > 10) {
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h.Get() == nullptr) {
        self->ClearException();

        // Try a smaller length
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h.Get() != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    // With the heap exhausted, the stub must fail with a pending exception
    // (OOME) and a null result.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1366
// Exercises the quick object-array allocation stubs: a successful allocation of
// a 10-element Object[] through kQuickAllocArrayResolved, followed by an
// out-of-memory failure path with an absurdly large length.
TEST_F(StubTest, AllocObjectArray) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(self);
  // The array class whose instances we allocate below.
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

  // Needed to have a linked method.
  Handle<mirror::Class> c_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());

  // For some reason this does not work, as the type_idx is artificial and outside what the
  // resolved types of c_obj allow...

  // Disabled: kQuickAllocArray with a type_idx referrer (see note above).
  if ((false)) {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            10U,
                            // arbitrary
                            reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0, sizeof(void*))),
                            StubTest::GetEntrypoint(self, kQuickAllocArray),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    EXPECT_EQ(obj->GetLength(), 10);
  }

  // Success path: resolved/initialized array class, length 10.
  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 10U,
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory.
  {
    // A GB-element array is assumed to exceed the test heap; expect an exception
    // and a null result.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()),
                            GB,  // that should fail...
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1452
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001453
// Exercises the art_quick_string_compareto stub for every ordered pair of a set
// of test strings. Expected results come from String::CompareTo, and only the
// SIGN of the stub's return value is checked, not its magnitude.
TEST_F(StubTest, StringCompareTo) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs
  // NOTE(review): the TODO above looks copy/pasted from the allocation tests;
  // there is no "Unresolved" stub involved in String.compareTo — confirm and drop.

  Thread* self = Thread::Current();

  const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);

  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // NOTE(review): the "non-zero offset" remark appears stale — the strings below
  // are plain and no offsets are applied anywhere in this test; confirm and update.
  // TODO: Shared backing arrays.
  const char* c[] = { "", "", "a", "aa", "ab",
      "aacaacaacaacaacaac",  // This one's under the default limit to go to __memcmp16.
      "aacaacaacaacaacaacaacaacaacaacaacaac",     // This one's over.
      "aacaacaacaacaacaacaacaacaacaacaacaaca" };  // As is this one. We need a separate one to
                                                  // defeat object-equal optimizations.
  static constexpr size_t kStringCount = arraysize(c);

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
  }

  // TODO: wide characters

  // Matrix of expectations. First component is first parameter. Note we only check against the
  // sign, not the value. As we are testing random offsets, we need to compute this and need to
  // rely on String::CompareTo being correct.
  int32_t expected[kStringCount][kStringCount];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      expected[x][y] = s[x]->CompareTo(s[y].Get());
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      // Test string_compareto x y
      size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
                              reinterpret_cast<size_t>(s[y].Get()), 0U,
                              art_quick_string_compareto, self);

      EXPECT_FALSE(self->IsExceptionPending());

      // The result is a 32b signed integer
      // (reinterpret the low 32 bits of the returned word).
      union {
        size_t r;
        int32_t i;
      } conv;
      conv.r = result;
      int32_t e = expected[x][y];
      // Sign-only checks: each assertion is vacuously true unless the expected
      // comparison class (==, <, >) applies.
      EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
      std::endl;
#endif
}
1532
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001533
Mathieu Chartierc7853442015-03-27 14:35:38 -07001534static void GetSetBooleanStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001535 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001536 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001537#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1538 (defined(__x86_64__) && !defined(__APPLE__))
Fred Shih37f05ef2014-07-16 18:38:08 -07001539 constexpr size_t num_values = 5;
1540 uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };
1541
1542 for (size_t i = 0; i < num_values; ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001543 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001544 static_cast<size_t>(values[i]),
1545 0U,
1546 StubTest::GetEntrypoint(self, kQuickSet8Static),
1547 self,
1548 referrer);
1549
Mathieu Chartierc7853442015-03-27 14:35:38 -07001550 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001551 0U, 0U,
1552 StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
1553 self,
1554 referrer);
1555 // Boolean currently stores bools as uint8_t, be more zealous about asserting correct writes/gets.
1556 EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
1557 }
1558#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001559 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001560 LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
1561 // Force-print to std::cout so it's also outside the logcat.
1562 std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1563#endif
1564}
Mathieu Chartiere401d142015-04-22 13:56:20 -07001565static void GetSetByteStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Mathieu Chartierc7853442015-03-27 14:35:38 -07001566 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001567 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001568#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1569 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001570 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001571
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001572 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001573 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001574 static_cast<size_t>(values[i]),
1575 0U,
1576 StubTest::GetEntrypoint(self, kQuickSet8Static),
1577 self,
1578 referrer);
1579
Mathieu Chartierc7853442015-03-27 14:35:38 -07001580 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001581 0U, 0U,
1582 StubTest::GetEntrypoint(self, kQuickGetByteStatic),
1583 self,
1584 referrer);
1585 EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
1586 }
1587#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001588 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001589 LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
1590 // Force-print to std::cout so it's also outside the logcat.
1591 std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1592#endif
1593}
1594
1595
Mathieu Chartierc7853442015-03-27 14:35:38 -07001596static void GetSetBooleanInstance(Handle<mirror::Object>* obj, ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001597 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001598 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001599#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1600 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001601 uint8_t values[] = { 0, true, 2, 128, 0xFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001602
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001603 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001604 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001605 reinterpret_cast<size_t>(obj->Get()),
1606 static_cast<size_t>(values[i]),
1607 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1608 self,
1609 referrer);
1610
Mathieu Chartierc7853442015-03-27 14:35:38 -07001611 uint8_t res = f->GetBoolean(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001612 EXPECT_EQ(values[i], res) << "Iteration " << i;
1613
Mathieu Chartierc7853442015-03-27 14:35:38 -07001614 f->SetBoolean<false>(obj->Get(), res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001615
Mathieu Chartierc7853442015-03-27 14:35:38 -07001616 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001617 reinterpret_cast<size_t>(obj->Get()),
1618 0U,
1619 StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
1620 self,
1621 referrer);
1622 EXPECT_EQ(res, static_cast<uint8_t>(res2));
1623 }
1624#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001625 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001626 LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
1627 // Force-print to std::cout so it's also outside the logcat.
1628 std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1629#endif
1630}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001631static void GetSetByteInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001632 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001633 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001634#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1635 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001636 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001637
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001638 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001639 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001640 reinterpret_cast<size_t>(obj->Get()),
1641 static_cast<size_t>(values[i]),
1642 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1643 self,
1644 referrer);
1645
Mathieu Chartierc7853442015-03-27 14:35:38 -07001646 int8_t res = f->GetByte(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001647 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001648 f->SetByte<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001649
Mathieu Chartierc7853442015-03-27 14:35:38 -07001650 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001651 reinterpret_cast<size_t>(obj->Get()),
1652 0U,
1653 StubTest::GetEntrypoint(self, kQuickGetByteInstance),
1654 self,
1655 referrer);
1656 EXPECT_EQ(res, static_cast<int8_t>(res2));
1657 }
1658#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001659 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001660 LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
1661 // Force-print to std::cout so it's also outside the logcat.
1662 std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1663#endif
1664}
1665
Mathieu Chartiere401d142015-04-22 13:56:20 -07001666static void GetSetCharStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001667 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001668 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001669#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1670 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001671 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001672
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001673 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001674 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001675 static_cast<size_t>(values[i]),
1676 0U,
1677 StubTest::GetEntrypoint(self, kQuickSet16Static),
1678 self,
1679 referrer);
1680
Mathieu Chartierc7853442015-03-27 14:35:38 -07001681 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001682 0U, 0U,
1683 StubTest::GetEntrypoint(self, kQuickGetCharStatic),
1684 self,
1685 referrer);
1686
1687 EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
1688 }
1689#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001690 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001691 LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
1692 // Force-print to std::cout so it's also outside the logcat.
1693 std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1694#endif
1695}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001696static void GetSetShortStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001697 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001698 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001699#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1700 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001701 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001702
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001703 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001704 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001705 static_cast<size_t>(values[i]),
1706 0U,
1707 StubTest::GetEntrypoint(self, kQuickSet16Static),
1708 self,
1709 referrer);
1710
Mathieu Chartierc7853442015-03-27 14:35:38 -07001711 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001712 0U, 0U,
1713 StubTest::GetEntrypoint(self, kQuickGetShortStatic),
1714 self,
1715 referrer);
1716
1717 EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
1718 }
1719#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001720 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001721 LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
1722 // Force-print to std::cout so it's also outside the logcat.
1723 std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1724#endif
1725}
1726
Mathieu Chartierc7853442015-03-27 14:35:38 -07001727static void GetSetCharInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001728 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001729 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001730#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1731 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001732 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001733
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001734 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001735 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001736 reinterpret_cast<size_t>(obj->Get()),
1737 static_cast<size_t>(values[i]),
1738 StubTest::GetEntrypoint(self, kQuickSet16Instance),
1739 self,
1740 referrer);
1741
Mathieu Chartierc7853442015-03-27 14:35:38 -07001742 uint16_t res = f->GetChar(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001743 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001744 f->SetChar<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001745
Mathieu Chartierc7853442015-03-27 14:35:38 -07001746 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001747 reinterpret_cast<size_t>(obj->Get()),
1748 0U,
1749 StubTest::GetEntrypoint(self, kQuickGetCharInstance),
1750 self,
1751 referrer);
1752 EXPECT_EQ(res, static_cast<uint16_t>(res2));
1753 }
1754#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001755 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001756 LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
1757 // Force-print to std::cout so it's also outside the logcat.
1758 std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1759#endif
1760}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001761static void GetSetShortInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001762 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001763 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001764#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1765 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001766 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001767
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001768 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001769 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001770 reinterpret_cast<size_t>(obj->Get()),
1771 static_cast<size_t>(values[i]),
1772 StubTest::GetEntrypoint(self, kQuickSet16Instance),
1773 self,
1774 referrer);
1775
Mathieu Chartierc7853442015-03-27 14:35:38 -07001776 int16_t res = f->GetShort(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001777 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001778 f->SetShort<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001779
Mathieu Chartierc7853442015-03-27 14:35:38 -07001780 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001781 reinterpret_cast<size_t>(obj->Get()),
1782 0U,
1783 StubTest::GetEntrypoint(self, kQuickGetShortInstance),
1784 self,
1785 referrer);
1786 EXPECT_EQ(res, static_cast<int16_t>(res2));
1787 }
1788#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001789 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001790 LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
1791 // Force-print to std::cout so it's also outside the logcat.
1792 std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1793#endif
1794}
1795
// Round-trips a set of 32-bit values through the Set32Static/Get32Static quick
// entrypoints, comparing what the get stub returns against what was written.
static void GetSet32Static(ArtField* f, Thread* self, ArtMethod* referrer,
                           StubTest* test)
    SHARED_REQUIRES(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Boundary and mid-range 32-bit patterns, including all-ones.
  uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the set stub...
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet32Static),
                              self,
                              referrer);

    // ...and read back through the get stub.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet32Static),
                                           self,
                                           referrer);

#if defined(__mips__) && defined(__LP64__)
    // On mips64 only the low 32 bits are compared — presumably the stub returns
    // the value extended to 64 bits there; confirm against the mips64 stub.
    EXPECT_EQ(static_cast<uint32_t>(res), values[i]) << "Iteration " << i;
#else
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
#endif
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1830
1831
Mathieu Chartierc7853442015-03-27 14:35:38 -07001832static void GetSet32Instance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001833 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001834 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001835#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1836 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001837 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001838
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001839 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001840 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001841 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001842 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001843 StubTest::GetEntrypoint(self, kQuickSet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001844 self,
1845 referrer);
1846
Mathieu Chartierc7853442015-03-27 14:35:38 -07001847 int32_t res = f->GetInt(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001848 EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;
1849
1850 res++;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001851 f->SetInt<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001852
Mathieu Chartierc7853442015-03-27 14:35:38 -07001853 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001854 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001855 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001856 StubTest::GetEntrypoint(self, kQuickGet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001857 self,
1858 referrer);
1859 EXPECT_EQ(res, static_cast<int32_t>(res2));
1860 }
1861#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001862 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001863 LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
1864 // Force-print to std::cout so it's also outside the logcat.
1865 std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1866#endif
1867}
1868
1869
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001870#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1871 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001872
1873static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001874 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001875 SHARED_REQUIRES(Locks::mutator_lock_) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001876 test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1877 reinterpret_cast<size_t>(val),
1878 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001879 StubTest::GetEntrypoint(self, kQuickSetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001880 self,
1881 referrer);
1882
1883 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1884 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001885 StubTest::GetEntrypoint(self, kQuickGetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001886 self,
1887 referrer);
1888
1889 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1890}
1891#endif
1892
Mathieu Chartiere401d142015-04-22 13:56:20 -07001893static void GetSetObjStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001894 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001895 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001896#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1897 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierc7853442015-03-27 14:35:38 -07001898 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001899
1900 // Allocate a string object for simplicity.
1901 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartierc7853442015-03-27 14:35:38 -07001902 set_and_check_static(f->GetDexFieldIndex(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001903
Mathieu Chartierc7853442015-03-27 14:35:38 -07001904 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001905#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001906 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001907 LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
1908 // Force-print to std::cout so it's also outside the logcat.
1909 std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
1910#endif
1911}
1912
1913
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001914#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1915 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierc7853442015-03-27 14:35:38 -07001916static void set_and_check_instance(ArtField* f, mirror::Object* trg,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001917 mirror::Object* val, Thread* self, ArtMethod* referrer,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001918 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001919 SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001920 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001921 reinterpret_cast<size_t>(trg),
1922 reinterpret_cast<size_t>(val),
Andreas Gampe29b38412014-08-13 00:15:43 -07001923 StubTest::GetEntrypoint(self, kQuickSetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001924 self,
1925 referrer);
1926
Mathieu Chartierc7853442015-03-27 14:35:38 -07001927 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001928 reinterpret_cast<size_t>(trg),
1929 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001930 StubTest::GetEntrypoint(self, kQuickGetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001931 self,
1932 referrer);
1933
1934 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1935
Mathieu Chartierc7853442015-03-27 14:35:38 -07001936 EXPECT_EQ(val, f->GetObj(trg));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001937}
1938#endif
1939
Mathieu Chartierc7853442015-03-27 14:35:38 -07001940static void GetSetObjInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001941 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001942 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001943#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1944 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001945 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001946
1947 // Allocate a string object for simplicity.
1948 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001949 set_and_check_instance(f, obj->Get(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001950
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001951 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001952#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001953 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001954 LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
1955 // Force-print to std::cout so it's also outside the logcat.
1956 std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
1957#endif
1958}
1959
1960
Calin Juravle872ab3f2015-10-02 07:27:51 +01001961// TODO: Complete these tests for 32b architectures
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001962
Mathieu Chartiere401d142015-04-22 13:56:20 -07001963static void GetSet64Static(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001964 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001965 SHARED_REQUIRES(Locks::mutator_lock_) {
Calin Juravle6e399ac2015-10-02 23:56:06 +01001966#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) \
1967 || defined(__aarch64__)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001968 uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001969
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001970 for (size_t i = 0; i < arraysize(values); ++i) {
Calin Juravle6e399ac2015-10-02 23:56:06 +01001971 // 64 bit FieldSet stores the set value in the second register.
1972 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Calin Juravle872ab3f2015-10-02 07:27:51 +01001973 0U,
Calin Juravle6e399ac2015-10-02 23:56:06 +01001974 values[i],
Calin Juravle872ab3f2015-10-02 07:27:51 +01001975 StubTest::GetEntrypoint(self, kQuickSet64Static),
1976 self,
1977 referrer);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001978
Mathieu Chartierc7853442015-03-27 14:35:38 -07001979 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001980 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001981 StubTest::GetEntrypoint(self, kQuickGet64Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001982 self,
1983 referrer);
1984
1985 EXPECT_EQ(res, values[i]) << "Iteration " << i;
1986 }
1987#else
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001988 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001989 LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
1990 // Force-print to std::cout so it's also outside the logcat.
1991 std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
1992#endif
1993}
1994
1995
Mathieu Chartierc7853442015-03-27 14:35:38 -07001996static void GetSet64Instance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001997 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001998 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001999#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) || \
2000 defined(__aarch64__)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002001 uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002002
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002003 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07002004 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07002005 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002006 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07002007 StubTest::GetEntrypoint(self, kQuickSet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002008 self,
2009 referrer);
2010
Mathieu Chartierc7853442015-03-27 14:35:38 -07002011 int64_t res = f->GetLong(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002012 EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;
2013
2014 res++;
Mathieu Chartierc7853442015-03-27 14:35:38 -07002015 f->SetLong<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002016
Mathieu Chartierc7853442015-03-27 14:35:38 -07002017 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07002018 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002019 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07002020 StubTest::GetEntrypoint(self, kQuickGet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002021 self,
2022 referrer);
2023 EXPECT_EQ(res, static_cast<int64_t>(res2));
2024 }
2025#else
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002026 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002027 LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
2028 // Force-print to std::cout so it's also outside the logcat.
2029 std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
2030#endif
2031}
2032
// Instantiates the "AllFields" test class and runs the matching GetSet*
// stub helper on every one of its static and instance fields whose primitive
// type equals |test_type|. Array-typed reference fields are skipped.
static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
  // garbage is created during ClassLinker::Init

  // Create an AllFields instance through JNI.
  JNIEnv* env = Thread::Current()->GetJniEnv();
  jclass jc = env->FindClass("AllFields");
  CHECK(jc != nullptr);
  jobject o = env->AllocObject(jc);
  CHECK(o != nullptr);

  // Decode the JNI references into handles so they survive GC while we poke at them.
  ScopedObjectAccess soa(self);
  StackHandleScope<3> hs(self);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
  Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
  // Need a method as a referrer
  ArtMethod* m = c->GetDirectMethod(0, sizeof(void*));

  // Play with it...

  // Static fields.
  for (ArtField& f : c->GetSFields()) {
    Primitive::Type type = f.GetTypeAsPrimitiveType();
    // Only exercise fields of the requested type.
    if (test_type != type) {
      continue;
    }
    switch (type) {
      case Primitive::Type::kPrimBoolean:
        GetSetBooleanStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimByte:
        GetSetByteStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimChar:
        GetSetCharStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimShort:
        GetSetShortStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimInt:
        GetSet32Static(&f, self, m, test);
        break;
      case Primitive::Type::kPrimLong:
        GetSet64Static(&f, self, m, test);
        break;
      case Primitive::Type::kPrimNot:
        // Don't try array.
        if (f.GetTypeDescriptor()[0] != '[') {
          GetSetObjStatic(&f, self, m, test);
        }
        break;
      default:
        break;  // Skip.
    }
  }

  // Instance fields.
  for (ArtField& f : c->GetIFields()) {
    Primitive::Type type = f.GetTypeAsPrimitiveType();
    // Only exercise fields of the requested type.
    if (test_type != type) {
      continue;
    }
    switch (type) {
      case Primitive::Type::kPrimBoolean:
        GetSetBooleanInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimByte:
        GetSetByteInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimChar:
        GetSetCharInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimShort:
        GetSetShortInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimInt:
        GetSet32Instance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimLong:
        GetSet64Instance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimNot:
        // Don't try array.
        if (f.GetTypeDescriptor()[0] != '[') {
          GetSetObjInstance(&obj, &f, self, m, test);
        }
        break;
      default:
        break;  // Skip.
    }
  }

  // TODO: Deallocate things.
}
2125
// Tests the 8-bit (boolean and byte) field get/set stubs.
TEST_F(StubTest, Fields8) {
  Thread* self = Thread::Current();

  // The stubs need a started runtime with the AllFields dex file loaded.
  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimBoolean);
  TestFields(self, this, Primitive::Type::kPrimByte);
}
2137
// Tests the 16-bit (char and short) field get/set stubs.
TEST_F(StubTest, Fields16) {
  Thread* self = Thread::Current();

  // The stubs need a started runtime with the AllFields dex file loaded.
  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimChar);
  TestFields(self, this, Primitive::Type::kPrimShort);
}
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002149
// Tests the 32-bit (int) field get/set stubs.
TEST_F(StubTest, Fields32) {
  Thread* self = Thread::Current();

  // The stubs need a started runtime with the AllFields dex file loaded.
  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimInt);
}
2160
// Tests the object-reference field get/set stubs.
TEST_F(StubTest, FieldsObj) {
  Thread* self = Thread::Current();

  // The stubs need a started runtime with the AllFields dex file loaded.
  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimNot);
}
2171
// Tests the 64-bit (long) field get/set stubs.
TEST_F(StubTest, Fields64) {
  Thread* self = Thread::Current();

  // The stubs need a started runtime with the AllFields dex file loaded.
  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimLong);
}
2182
// Tests interface-method dispatch stubs using java.util.List.contains on an
// ArrayList: first the IMT conflict trampoline (with the interface method's
// dex method index passed as the hidden argument), then the regular
// invoke-interface-with-access-check trampoline.
TEST_F(StubTest, IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod = env->GetMethodID(
      arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get representation.
  ArtMethod* contains_amethod = soa.DecodeMethod(contains_jmethod);

  // Patch up ArrayList.contains: if it has no compiled code, route it through
  // the quick-to-interpreter bridge so the trampolines have something to call.
  if (contains_amethod->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
  }

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod = env->GetMethodID(
      list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get mirror representation.
  ArtMethod* inf_contains = soa.DecodeMethod(inf_contains_jmethod);

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Create instances.

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object*>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(jobj)));

  // Invocation tests.

  // 1. imt_conflict

  // Contains. The hidden argument is the interface method's dex method index;
  // the list is empty, so this should report false.

  size_t result =
      Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self, contains_amethod,
                                   static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());

  // Contains. Now the object is in the list, so this should report true.

  result = Invoke3WithReferrerAndHidden(
      0U, reinterpret_cast<size_t>(array_list.Get()), reinterpret_cast<size_t>(obj.Get()),
      StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline), self, contains_amethod,
      static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // 2. regular interface trampoline

  // The object is in the list: expect true.
  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(obj.Get()),
                               StubTest::GetEntrypoint(self,
                                                       kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // The list does not contain itself: expect false.
  result = Invoke3WithReferrer(
      static_cast<size_t>(inf_contains->GetDexMethodIndex()),
      reinterpret_cast<size_t>(array_list.Get()), reinterpret_cast<size_t>(array_list.Get()),
      StubTest::GetEntrypoint(self, kQuickInvokeInterfaceTrampolineWithAccessCheck), self,
      contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
2302
// Tests the kQuickIndexOf stub against String::FastIndexOf over a matrix of
// strings x characters x start offsets (start ranges from -1 to past the end).
TEST_F(StubTest, StringIndexOf) {
#if defined(__arm__) || defined(__aarch64__)
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  const char* c_str[] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
  static constexpr size_t kStringCount = arraysize(c_str);
  const char c_char[] = { 'a', 'b', 'c', 'd', 'e' };
  static constexpr size_t kCharCount = arraysize(c_char);

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
  }

  // Matrix of expectations. First component is first parameter. Note we only check against the
  // sign, not the value. As we are testing random offsets, we need to compute this and need to
  // rely on String::CompareTo being correct.
  static constexpr size_t kMaxLen = 9;
  DCHECK_LE(strlen(c_str[kStringCount-1]), kMaxLen) << "Please fix the indexof test.";

  // Last dimension: start, offset by 1 (index 0 corresponds to start == -1).
  int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
      }
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        int32_t start = static_cast<int32_t>(z) - 1;

        // Test string_compareto x y
        size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
                                StubTest::GetEntrypoint(self, kQuickIndexOf), self);

        EXPECT_FALSE(self->IsExceptionPending());

        // The result is a 32b signed integer
        union {
          size_t r;
          int32_t i;
        } conv;
        conv.r = result;

        EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
            c_char[y] << " @ " << start;
      }
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
2376
// Tests the kQuickReadBarrierSlow stub: reads the class field of a freshly
// allocated object through the slow-path read barrier and checks that it
// matches a normal GetClass() read.
TEST_F(StubTest, ReadBarrier) {
#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
    defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
  Thread* self = Thread::Current();

  const uintptr_t readBarrierSlow = StubTest::GetEntrypoint(self, kQuickReadBarrierSlow);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Build an object instance
  Handle<mirror::Object> obj(hs.NewHandle(c->AllocObject(soa.Self())));

  EXPECT_FALSE(self->IsExceptionPending());

  // Read obj's class field (at ClassOffset) through the slow-path stub.
  size_t result = Invoke3(0U, reinterpret_cast<size_t>(obj.Get()),
                          mirror::Object::ClassOffset().SizeValue(), readBarrierSlow, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
  mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
  EXPECT_EQ(klass, obj->GetClass());

  // Tests done.
#else
  LOG(INFO) << "Skipping read_barrier_slow";
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping read_barrier_slow" << std::endl;
#endif
}
2412
Andreas Gampe525cde22014-04-22 15:44:50 -07002413} // namespace art