blob: 016c664b0c10335f3cf91fc238c691ace3b12553 [file] [log] [blame]
Andreas Gampe525cde22014-04-22 15:44:50 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Ian Rogerse63db272014-07-15 15:36:11 -070017#include <cstdio>
18
Mathieu Chartierc7853442015-03-27 14:35:38 -070019#include "art_field-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070020#include "art_method-inl.h"
Vladimir Marko3481ba22015-04-13 12:22:36 +010021#include "class_linker-inl.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070022#include "common_runtime_test.h"
Andreas Gampe29b38412014-08-13 00:15:43 -070023#include "entrypoints/quick/quick_entrypoints_enum.h"
Andreas Gampe51f76352014-05-21 08:28:48 -070024#include "mirror/class-inl.h"
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -070025#include "mirror/string-inl.h"
Ian Rogerse63db272014-07-15 15:36:11 -070026#include "scoped_thread_state_change.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070027
28namespace art {
29
30
31class StubTest : public CommonRuntimeTest {
32 protected:
33 // We need callee-save methods set up in the Runtime for exceptions.
34 void SetUp() OVERRIDE {
35 // Do the normal setup.
36 CommonRuntimeTest::SetUp();
37
38 {
39 // Create callee-save methods
40 ScopedObjectAccess soa(Thread::Current());
Vladimir Marko7624d252014-05-02 14:40:15 +010041 runtime_->SetInstructionSet(kRuntimeISA);
Andreas Gampe525cde22014-04-22 15:44:50 -070042 for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
43 Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
44 if (!runtime_->HasCalleeSaveMethod(type)) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -070045 runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(), type);
Andreas Gampe525cde22014-04-22 15:44:50 -070046 }
47 }
48 }
49 }
50
Ian Rogerse63db272014-07-15 15:36:11 -070051 void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
Andreas Gampe00c1e6d2014-04-25 15:47:13 -070052 // Use a smaller heap
53 for (std::pair<std::string, const void*>& pair : *options) {
54 if (pair.first.find("-Xmx") == 0) {
55 pair.first = "-Xmx4M"; // Smallest we can go.
56 }
57 }
Andreas Gampe51f76352014-05-21 08:28:48 -070058 options->push_back(std::make_pair("-Xint", nullptr));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -070059 }
Andreas Gampe525cde22014-04-22 15:44:50 -070060
  // Helper function needed since TEST_F makes a new class.
  // Exposes the given Thread's tlsPtr_ struct to the tests. tlsPtr_ is not
  // accessible from the TEST_F-generated subclasses, so they go through this
  // accessor on the fixture base class (presumably StubTest is granted access
  // to Thread internals — confirm against thread.h).
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }
65
Andreas Gampe4fc046e2014-05-06 16:56:39 -070066 public:
  // Invokes the stub at |code| with three word-sized arguments and no referrer
  // method. Thin convenience wrapper around Invoke3WithReferrer.
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }
Andreas Gampe6e4e59c2014-05-05 20:11:02 -070070
  // Invokes the quick stub at |code| with up to three word-sized arguments and an
  // explicit |referrer| method, simulating a call from managed code: a ManagedStack
  // fragment is pushed around the call so the runtime sees a managed-to-native
  // transition. Returns the stub's word-sized return value.
  //
  // Per-architecture inline assembly marshals arg0-arg2, the code pointer, the
  // current Thread* and the referrer into the registers/stack slots the quick ABI
  // expects (e.g. r9/x19/$s1 hold Thread* on arm/arm64/mips). On arm64 the code
  // additionally seeds the callee-save FP registers d8-d15 with sentinel values and
  // verifies after the call that the stub preserved them; the comparison result is
  // written to fpr_result and checked via EXPECT_EQ below (fp_result is a fixture
  // member declared outside this chunk — presumably so tests can inspect it).
  // On unsupported architectures this logs a warning and returns 0.
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             ArtMethod* referrer) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "subl $12, %%esp\n\t"       // Align stack.
        "pushl %[referrer]\n\t"     // Store referrer.
        "call *%%edi\n\t"           // Call the stub
        "addl $16, %%esp"           // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    // but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #20\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "add sp, sp, #20\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop null and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : "r0", "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"       // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t" // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "str %[self], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldr x19, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"


        "blr x3\n\t"                // Call the stub
        "mov x8, x0\n\t"            // Store result
        "add sp, sp, #16\n\t"       // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"            // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"      // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"       // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t" // Store the FPR comparison result
        "mov %[result], x8\n\t"     // Store the call result

        "b 3f\n\t"                  // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                  // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__mips__) && !defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a3 and t0-t7 which we say we don't clobber. May contain args.
        "addiu $sp, $sp, -64\n\t"
        "sw $a0, 0($sp)\n\t"
        "sw $a1, 4($sp)\n\t"
        "sw $a2, 8($sp)\n\t"
        "sw $a3, 12($sp)\n\t"
        "sw $t0, 16($sp)\n\t"
        "sw $t1, 20($sp)\n\t"
        "sw $t2, 24($sp)\n\t"
        "sw $t3, 28($sp)\n\t"
        "sw $t4, 32($sp)\n\t"
        "sw $t5, 36($sp)\n\t"
        "sw $t6, 40($sp)\n\t"
        "sw $t7, 44($sp)\n\t"
        // Spill gp register since it is caller save.
        "sw $gp, 52($sp)\n\t"

        "addiu $sp, $sp, -16\n\t"   // Reserve stack space, 16B aligned.
        "sw %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "addiu $sp, $sp, -20\n\t"
        "sw %[arg0], 0($sp)\n\t"
        "sw %[arg1], 4($sp)\n\t"
        "sw %[arg2], 8($sp)\n\t"
        "sw %[code], 12($sp)\n\t"
        "sw %[self], 16($sp)\n\t"

        // Load call params into the right registers.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $t9, 12($sp)\n\t"
        "lw $s1, 16($sp)\n\t"
        "addiu $sp, $sp, 20\n\t"

        "jalr $t9\n\t"              // Call the stub.
        "nop\n\t"
        "addiu $sp, $sp, 16\n\t"    // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $a3, 12($sp)\n\t"
        "lw $t0, 16($sp)\n\t"
        "lw $t1, 20($sp)\n\t"
        "lw $t2, 24($sp)\n\t"
        "lw $t3, 28($sp)\n\t"
        "lw $t4, 32($sp)\n\t"
        "lw $t5, 36($sp)\n\t"
        "lw $t6, 40($sp)\n\t"
        "lw $t7, 44($sp)\n\t"
        // Restore gp.
        "lw $gp, 52($sp)\n\t"
        "addiu $sp, $sp, 64\n\t"    // Free stack space, now sp as on entry.

        "move %[result], $v0\n\t"   // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : "at", "v0", "v1", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "t8", "t9", "k0", "k1",
          "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__mips__) && defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a7 which we say we don't clobber. May contain args.
        "daddiu $sp, $sp, -64\n\t"
        "sd $a0, 0($sp)\n\t"
        "sd $a1, 8($sp)\n\t"
        "sd $a2, 16($sp)\n\t"
        "sd $a3, 24($sp)\n\t"
        "sd $a4, 32($sp)\n\t"
        "sd $a5, 40($sp)\n\t"
        "sd $a6, 48($sp)\n\t"
        "sd $a7, 56($sp)\n\t"

        "daddiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sd %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "daddiu $sp, $sp, -40\n\t"
        "sd %[arg0], 0($sp)\n\t"
        "sd %[arg1], 8($sp)\n\t"
        "sd %[arg2], 16($sp)\n\t"
        "sd %[code], 24($sp)\n\t"
        "sd %[self], 32($sp)\n\t"

        // Load call params into the right registers.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $t9, 24($sp)\n\t"
        "ld $s1, 32($sp)\n\t"
        "daddiu $sp, $sp, 40\n\t"

        "jalr $t9\n\t"              // Call the stub.
        "nop\n\t"
        "daddiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $a3, 24($sp)\n\t"
        "ld $a4, 32($sp)\n\t"
        "ld $a5, 40($sp)\n\t"
        "ld $a6, 48($sp)\n\t"
        "ld $a7, 56($sp)\n\t"
        "daddiu $sp, $sp, 64\n\t"

        "move %[result], $v0\n\t"   // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : "at", "v0", "v1", "t0", "t1", "t2", "t3", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
          "t8", "t9", "k0", "k1", "fp", "ra",
          "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7", "f8", "f9", "f10", "f11", "f12", "f13",
          "f14", "f15", "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23", "f24", "f25", "f26",
          "f27", "f28", "f29", "f30", "f31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__) && defined(__clang__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"     // Push referrer
        "pushq (%%rsp)\n\t"         // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"           // Call the stub
        "addq $16, %%rsp\n\t"       // Pop null and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "c"(referrer)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    UNUSED(arg0, arg1, arg2, code, referrer);
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }
420
  // 64bit static field set use a slightly different register order than Invoke3WithReferrer.
  // Only implemented for x86-64: arg1 is placed in rdx and arg2 in rsi (the reverse
  // of Invoke3WithReferrer's mapping); other architectures log a warning and return 0.
  // TODO: implement for other architectures
  // TODO: try merge with Invoke3WithReferrer
  size_t Invoke64StaticSet(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                           ArtMethod* referrer) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__x86_64__) && !defined(__APPLE__) && defined(__clang__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"     // Push referrer
        "pushq (%%rsp)\n\t"         // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"           // Call the stub
        "addq $16, %%rsp\n\t"       // Pop null and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "d"(arg1), "S"(arg2), "a"(code), [referrer] "c"(referrer)
          // This places arg0 into rdi, arg1 into rdx, arg2 into rsi, and code into rax
        : "rbx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    UNUSED(arg0, arg1, arg2, code, referrer);
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }
462
Andreas Gampe51f76352014-05-21 08:28:48 -0700463 // TODO: Set up a frame according to referrer's specs.
464 size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
Mathieu Chartiere401d142015-04-22 13:56:20 -0700465 Thread* self, ArtMethod* referrer, size_t hidden) {
Andreas Gampe51f76352014-05-21 08:28:48 -0700466 // Push a transition back into managed code onto the linked list in thread.
467 ManagedStack fragment;
468 self->PushManagedStackFragment(&fragment);
469
470 size_t result;
471 size_t fpr_result = 0;
472#if defined(__i386__)
473 // TODO: Set the thread?
474 __asm__ __volatile__(
Mark P Mendell966c3ae2015-01-27 15:45:27 +0000475 "movd %[hidden], %%xmm7\n\t"
Ian Rogersc5f17732014-06-05 20:48:42 -0700476 "subl $12, %%esp\n\t" // Align stack.
Andreas Gampe51f76352014-05-21 08:28:48 -0700477 "pushl %[referrer]\n\t" // Store referrer
478 "call *%%edi\n\t" // Call the stub
Ian Rogersc5f17732014-06-05 20:48:42 -0700479 "addl $16, %%esp" // Pop referrer
Andreas Gampe51f76352014-05-21 08:28:48 -0700480 : "=a" (result)
481 // Use the result from eax
Andreas Gampe1a7e2922014-05-21 15:37:53 -0700482 : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer), [hidden]"m"(hidden)
Andreas Gampe2f6e3512014-06-07 01:32:33 -0700483 // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
484 : "memory"); // clobber.
Andreas Gampe51f76352014-05-21 08:28:48 -0700485 // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
486 // but compilation fails when declaring that.
487#elif defined(__arm__)
488 __asm__ __volatile__(
489 "push {r1-r12, lr}\n\t" // Save state, 13*4B = 52B
490 ".cfi_adjust_cfa_offset 52\n\t"
491 "push {r9}\n\t"
492 ".cfi_adjust_cfa_offset 4\n\t"
493 "mov r9, %[referrer]\n\n"
494 "str r9, [sp, #-8]!\n\t" // Push referrer, +8B padding so 16B aligned
495 ".cfi_adjust_cfa_offset 8\n\t"
496 "ldr r9, [sp, #8]\n\t"
497
498 // Push everything on the stack, so we don't rely on the order. What a mess. :-(
499 "sub sp, sp, #24\n\t"
500 "str %[arg0], [sp]\n\t"
501 "str %[arg1], [sp, #4]\n\t"
502 "str %[arg2], [sp, #8]\n\t"
503 "str %[code], [sp, #12]\n\t"
504 "str %[self], [sp, #16]\n\t"
505 "str %[hidden], [sp, #20]\n\t"
506 "ldr r0, [sp]\n\t"
507 "ldr r1, [sp, #4]\n\t"
508 "ldr r2, [sp, #8]\n\t"
509 "ldr r3, [sp, #12]\n\t"
510 "ldr r9, [sp, #16]\n\t"
511 "ldr r12, [sp, #20]\n\t"
512 "add sp, sp, #24\n\t"
513
514 "blx r3\n\t" // Call the stub
Mathieu Chartier2cebb242015-04-21 16:50:40 -0700515 "add sp, sp, #12\n\t" // Pop null and padding
Andreas Gampe51f76352014-05-21 08:28:48 -0700516 ".cfi_adjust_cfa_offset -12\n\t"
517 "pop {r1-r12, lr}\n\t" // Restore state
518 ".cfi_adjust_cfa_offset -52\n\t"
519 "mov %[result], r0\n\t" // Save the result
520 : [result] "=r" (result)
521 // Use the result from r0
Andreas Gampe2f6e3512014-06-07 01:32:33 -0700522 : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
523 [referrer] "r"(referrer), [hidden] "r"(hidden)
Andreas Gampeff7b1142015-08-03 10:25:06 -0700524 : "r0", "memory"); // clobber.
Andreas Gampe51f76352014-05-21 08:28:48 -0700525#elif defined(__aarch64__)
526 __asm__ __volatile__(
Andreas Gampef39b3782014-06-03 14:38:30 -0700527 // Spill x0-x7 which we say we don't clobber. May contain args.
Andreas Gampe51f76352014-05-21 08:28:48 -0700528 "sub sp, sp, #64\n\t"
529 ".cfi_adjust_cfa_offset 64\n\t"
Andreas Gampef39b3782014-06-03 14:38:30 -0700530 "stp x0, x1, [sp]\n\t"
531 "stp x2, x3, [sp, #16]\n\t"
532 "stp x4, x5, [sp, #32]\n\t"
533 "stp x6, x7, [sp, #48]\n\t"
Andreas Gampe51f76352014-05-21 08:28:48 -0700534
Andreas Gampef39b3782014-06-03 14:38:30 -0700535 "sub sp, sp, #16\n\t" // Reserve stack space, 16B aligned
536 ".cfi_adjust_cfa_offset 16\n\t"
537 "str %[referrer], [sp]\n\t" // referrer
Andreas Gampe51f76352014-05-21 08:28:48 -0700538
539 // Push everything on the stack, so we don't rely on the order. What a mess. :-(
540 "sub sp, sp, #48\n\t"
541 ".cfi_adjust_cfa_offset 48\n\t"
Andreas Gampef39b3782014-06-03 14:38:30 -0700542 // All things are "r" constraints, so direct str/stp should work.
543 "stp %[arg0], %[arg1], [sp]\n\t"
544 "stp %[arg2], %[code], [sp, #16]\n\t"
545 "stp %[self], %[hidden], [sp, #32]\n\t"
Andreas Gampe51f76352014-05-21 08:28:48 -0700546
547 // Now we definitely have x0-x3 free, use it to garble d8 - d15
548 "movk x0, #0xfad0\n\t"
549 "movk x0, #0xebad, lsl #16\n\t"
550 "movk x0, #0xfad0, lsl #32\n\t"
551 "movk x0, #0xebad, lsl #48\n\t"
552 "fmov d8, x0\n\t"
553 "add x0, x0, 1\n\t"
554 "fmov d9, x0\n\t"
555 "add x0, x0, 1\n\t"
556 "fmov d10, x0\n\t"
557 "add x0, x0, 1\n\t"
558 "fmov d11, x0\n\t"
559 "add x0, x0, 1\n\t"
560 "fmov d12, x0\n\t"
561 "add x0, x0, 1\n\t"
562 "fmov d13, x0\n\t"
563 "add x0, x0, 1\n\t"
564 "fmov d14, x0\n\t"
565 "add x0, x0, 1\n\t"
566 "fmov d15, x0\n\t"
567
Andreas Gampef39b3782014-06-03 14:38:30 -0700568 // Load call params into the right registers.
569 "ldp x0, x1, [sp]\n\t"
570 "ldp x2, x3, [sp, #16]\n\t"
Serban Constantinescu9bd88b02015-04-22 16:24:46 +0100571 "ldp x19, x17, [sp, #32]\n\t"
Andreas Gampe51f76352014-05-21 08:28:48 -0700572 "add sp, sp, #48\n\t"
573 ".cfi_adjust_cfa_offset -48\n\t"
574
Andreas Gampe51f76352014-05-21 08:28:48 -0700575 "blr x3\n\t" // Call the stub
Andreas Gampef39b3782014-06-03 14:38:30 -0700576 "mov x8, x0\n\t" // Store result
577 "add sp, sp, #16\n\t" // Drop the quick "frame"
578 ".cfi_adjust_cfa_offset -16\n\t"
Andreas Gampe51f76352014-05-21 08:28:48 -0700579
580 // Test d8 - d15. We can use x1 and x2.
581 "movk x1, #0xfad0\n\t"
582 "movk x1, #0xebad, lsl #16\n\t"
583 "movk x1, #0xfad0, lsl #32\n\t"
584 "movk x1, #0xebad, lsl #48\n\t"
585 "fmov x2, d8\n\t"
586 "cmp x1, x2\n\t"
587 "b.ne 1f\n\t"
588 "add x1, x1, 1\n\t"
589
590 "fmov x2, d9\n\t"
591 "cmp x1, x2\n\t"
592 "b.ne 1f\n\t"
593 "add x1, x1, 1\n\t"
594
595 "fmov x2, d10\n\t"
596 "cmp x1, x2\n\t"
597 "b.ne 1f\n\t"
598 "add x1, x1, 1\n\t"
599
600 "fmov x2, d11\n\t"
601 "cmp x1, x2\n\t"
602 "b.ne 1f\n\t"
603 "add x1, x1, 1\n\t"
604
605 "fmov x2, d12\n\t"
606 "cmp x1, x2\n\t"
607 "b.ne 1f\n\t"
608 "add x1, x1, 1\n\t"
609
610 "fmov x2, d13\n\t"
611 "cmp x1, x2\n\t"
612 "b.ne 1f\n\t"
613 "add x1, x1, 1\n\t"
614
615 "fmov x2, d14\n\t"
616 "cmp x1, x2\n\t"
617 "b.ne 1f\n\t"
618 "add x1, x1, 1\n\t"
619
620 "fmov x2, d15\n\t"
621 "cmp x1, x2\n\t"
622 "b.ne 1f\n\t"
623
Andreas Gampef39b3782014-06-03 14:38:30 -0700624 "mov x9, #0\n\t" // Use x9 as flag, in clobber list
Andreas Gampe51f76352014-05-21 08:28:48 -0700625
626 // Finish up.
627 "2:\n\t"
Andreas Gampef39b3782014-06-03 14:38:30 -0700628 "ldp x0, x1, [sp]\n\t" // Restore stuff not named clobbered, may contain fpr_result
629 "ldp x2, x3, [sp, #16]\n\t"
630 "ldp x4, x5, [sp, #32]\n\t"
631 "ldp x6, x7, [sp, #48]\n\t"
632 "add sp, sp, #64\n\t" // Free stack space, now sp as on entry
Andreas Gampe51f76352014-05-21 08:28:48 -0700633 ".cfi_adjust_cfa_offset -64\n\t"
634
Andreas Gampef39b3782014-06-03 14:38:30 -0700635 "str x9, %[fpr_result]\n\t" // Store the FPR comparison result
636 "mov %[result], x8\n\t" // Store the call result
637
Andreas Gampe51f76352014-05-21 08:28:48 -0700638 "b 3f\n\t" // Goto end
639
640 // Failed fpr verification.
641 "1:\n\t"
Andreas Gampef39b3782014-06-03 14:38:30 -0700642 "mov x9, #1\n\t"
Andreas Gampe51f76352014-05-21 08:28:48 -0700643 "b 2b\n\t" // Goto finish-up
644
645 // End
646 "3:\n\t"
Andreas Gampef39b3782014-06-03 14:38:30 -0700647 : [result] "=r" (result)
648 // Use the result from r0
Andreas Gampe51f76352014-05-21 08:28:48 -0700649 : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
Andreas Gampef39b3782014-06-03 14:38:30 -0700650 [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
651 : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
652 "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
653 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
654 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
655 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
Andreas Gampe2f6e3512014-06-07 01:32:33 -0700656 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
657 "memory"); // clobber.
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200658#elif defined(__mips__) && !defined(__LP64__)
659 __asm__ __volatile__ (
660 // Spill a0-a3 and t0-t7 which we say we don't clobber. May contain args.
661 "addiu $sp, $sp, -64\n\t"
662 "sw $a0, 0($sp)\n\t"
663 "sw $a1, 4($sp)\n\t"
664 "sw $a2, 8($sp)\n\t"
665 "sw $a3, 12($sp)\n\t"
666 "sw $t0, 16($sp)\n\t"
667 "sw $t1, 20($sp)\n\t"
668 "sw $t2, 24($sp)\n\t"
669 "sw $t3, 28($sp)\n\t"
670 "sw $t4, 32($sp)\n\t"
671 "sw $t5, 36($sp)\n\t"
672 "sw $t6, 40($sp)\n\t"
673 "sw $t7, 44($sp)\n\t"
674 // Spill gp register since it is caller save.
675 "sw $gp, 52($sp)\n\t"
676
677 "addiu $sp, $sp, -16\n\t" // Reserve stack space, 16B aligned.
678 "sw %[referrer], 0($sp)\n\t"
679
680 // Push everything on the stack, so we don't rely on the order.
681 "addiu $sp, $sp, -24\n\t"
682 "sw %[arg0], 0($sp)\n\t"
683 "sw %[arg1], 4($sp)\n\t"
684 "sw %[arg2], 8($sp)\n\t"
685 "sw %[code], 12($sp)\n\t"
686 "sw %[self], 16($sp)\n\t"
687 "sw %[hidden], 20($sp)\n\t"
688
689 // Load call params into the right registers.
690 "lw $a0, 0($sp)\n\t"
691 "lw $a1, 4($sp)\n\t"
692 "lw $a2, 8($sp)\n\t"
693 "lw $t9, 12($sp)\n\t"
694 "lw $s1, 16($sp)\n\t"
695 "lw $t0, 20($sp)\n\t"
696 "addiu $sp, $sp, 24\n\t"
697
698 "jalr $t9\n\t" // Call the stub.
699 "nop\n\t"
700 "addiu $sp, $sp, 16\n\t" // Drop the quick "frame".
701
702 // Restore stuff not named clobbered.
703 "lw $a0, 0($sp)\n\t"
704 "lw $a1, 4($sp)\n\t"
705 "lw $a2, 8($sp)\n\t"
706 "lw $a3, 12($sp)\n\t"
707 "lw $t0, 16($sp)\n\t"
708 "lw $t1, 20($sp)\n\t"
709 "lw $t2, 24($sp)\n\t"
710 "lw $t3, 28($sp)\n\t"
711 "lw $t4, 32($sp)\n\t"
712 "lw $t5, 36($sp)\n\t"
713 "lw $t6, 40($sp)\n\t"
714 "lw $t7, 44($sp)\n\t"
715 // Restore gp.
716 "lw $gp, 52($sp)\n\t"
717 "addiu $sp, $sp, 64\n\t" // Free stack space, now sp as on entry.
718
719 "move %[result], $v0\n\t" // Store the call result.
720 : [result] "=r" (result)
721 : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
722 [referrer] "r"(referrer), [hidden] "r"(hidden)
723 : "at", "v0", "v1", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "t8", "t9", "k0", "k1",
724 "fp", "ra",
Nicolas Geoffrayc5b4b322015-09-15 16:36:50 +0100725 "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
726 "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
727 "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200728 "memory"); // clobber.
729#elif defined(__mips__) && defined(__LP64__)
730 __asm__ __volatile__ (
731 // Spill a0-a7 which we say we don't clobber. May contain args.
732 "daddiu $sp, $sp, -64\n\t"
733 "sd $a0, 0($sp)\n\t"
734 "sd $a1, 8($sp)\n\t"
735 "sd $a2, 16($sp)\n\t"
736 "sd $a3, 24($sp)\n\t"
737 "sd $a4, 32($sp)\n\t"
738 "sd $a5, 40($sp)\n\t"
739 "sd $a6, 48($sp)\n\t"
740 "sd $a7, 56($sp)\n\t"
741
742 "daddiu $sp, $sp, -16\n\t" // Reserve stack space, 16B aligned.
743 "sd %[referrer], 0($sp)\n\t"
744
745 // Push everything on the stack, so we don't rely on the order.
746 "daddiu $sp, $sp, -48\n\t"
747 "sd %[arg0], 0($sp)\n\t"
748 "sd %[arg1], 8($sp)\n\t"
749 "sd %[arg2], 16($sp)\n\t"
750 "sd %[code], 24($sp)\n\t"
751 "sd %[self], 32($sp)\n\t"
752 "sd %[hidden], 40($sp)\n\t"
753
754 // Load call params into the right registers.
755 "ld $a0, 0($sp)\n\t"
756 "ld $a1, 8($sp)\n\t"
757 "ld $a2, 16($sp)\n\t"
758 "ld $t9, 24($sp)\n\t"
759 "ld $s1, 32($sp)\n\t"
760 "ld $t0, 40($sp)\n\t"
761 "daddiu $sp, $sp, 48\n\t"
762
763 "jalr $t9\n\t" // Call the stub.
764 "nop\n\t"
765 "daddiu $sp, $sp, 16\n\t" // Drop the quick "frame".
766
767 // Restore stuff not named clobbered.
768 "ld $a0, 0($sp)\n\t"
769 "ld $a1, 8($sp)\n\t"
770 "ld $a2, 16($sp)\n\t"
771 "ld $a3, 24($sp)\n\t"
772 "ld $a4, 32($sp)\n\t"
773 "ld $a5, 40($sp)\n\t"
774 "ld $a6, 48($sp)\n\t"
775 "ld $a7, 56($sp)\n\t"
776 "daddiu $sp, $sp, 64\n\t"
777
778 "move %[result], $v0\n\t" // Store the call result.
779 : [result] "=r" (result)
780 : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
781 [referrer] "r"(referrer), [hidden] "r"(hidden)
782 : "at", "v0", "v1", "t0", "t1", "t2", "t3", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
783 "t8", "t9", "k0", "k1", "fp", "ra",
784 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7", "f8", "f9", "f10", "f11", "f12", "f13",
785 "f14", "f15", "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23", "f24", "f25", "f26",
786 "f27", "f28", "f29", "f30", "f31",
787 "memory"); // clobber.
Ian Rogers6f3dbba2014-10-14 17:41:57 -0700788#elif defined(__x86_64__) && !defined(__APPLE__) && defined(__clang__)
Andreas Gampe51f76352014-05-21 08:28:48 -0700789 // Note: Uses the native convention
790 // TODO: Set the thread?
791 __asm__ __volatile__(
Andreas Gampe51f76352014-05-21 08:28:48 -0700792 "pushq %[referrer]\n\t" // Push referrer
793 "pushq (%%rsp)\n\t" // & 16B alignment padding
794 ".cfi_adjust_cfa_offset 16\n\t"
Andreas Gampe1a7e2922014-05-21 15:37:53 -0700795 "call *%%rbx\n\t" // Call the stub
Mathieu Chartier2cebb242015-04-21 16:50:40 -0700796 "addq $16, %%rsp\n\t" // Pop null and padding
Andreas Gampe51f76352014-05-21 08:28:48 -0700797 ".cfi_adjust_cfa_offset -16\n\t"
798 : "=a" (result)
799 // Use the result from rax
Andreas Gampe1a7e2922014-05-21 15:37:53 -0700800 : "D"(arg0), "S"(arg1), "d"(arg2), "b"(code), [referrer] "c"(referrer), [hidden] "a"(hidden)
Andreas Gampe51f76352014-05-21 08:28:48 -0700801 // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
Andreas Gampe1a7e2922014-05-21 15:37:53 -0700802 : "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
Andreas Gampe2f6e3512014-06-07 01:32:33 -0700803 "memory"); // clobber all
Andreas Gampe51f76352014-05-21 08:28:48 -0700804 // TODO: Should we clobber the other registers?
805#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -0800806 UNUSED(arg0, arg1, arg2, code, referrer, hidden);
Andreas Gampe51f76352014-05-21 08:28:48 -0700807 LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
808 result = 0;
809#endif
810 // Pop transition.
811 self->PopManagedStackFragment(fragment);
812
813 fp_result = fpr_result;
814 EXPECT_EQ(0U, fp_result);
815
816 return result;
817 }
818
Andreas Gampe29b38412014-08-13 00:15:43 -0700819 static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
820 int32_t offset;
821#ifdef __LP64__
822 offset = GetThreadOffset<8>(entrypoint).Int32Value();
823#else
824 offset = GetThreadOffset<4>(entrypoint).Int32Value();
825#endif
826 return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
827 }
828
Andreas Gampe6cf80102014-05-19 11:32:41 -0700829 protected:
830 size_t fp_result;
Andreas Gampe525cde22014-04-22 15:44:50 -0700831};
832
833
Andreas Gampe525cde22014-04-22 15:44:50 -0700834TEST_F(StubTest, Memcpy) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200835#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__)) || defined(__mips__)
Andreas Gampe525cde22014-04-22 15:44:50 -0700836 Thread* self = Thread::Current();
837
838 uint32_t orig[20];
839 uint32_t trg[20];
840 for (size_t i = 0; i < 20; ++i) {
841 orig[i] = i;
842 trg[i] = 0;
843 }
844
845 Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
Andreas Gampe29b38412014-08-13 00:15:43 -0700846 10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700847
848 EXPECT_EQ(orig[0], trg[0]);
849
850 for (size_t i = 1; i < 4; ++i) {
851 EXPECT_NE(orig[i], trg[i]);
852 }
853
854 for (size_t i = 4; i < 14; ++i) {
855 EXPECT_EQ(orig[i], trg[i]);
856 }
857
858 for (size_t i = 14; i < 20; ++i) {
859 EXPECT_NE(orig[i], trg[i]);
860 }
861
862 // TODO: Test overlapping?
863
864#else
865 LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
866 // Force-print to std::cout so it's also outside the logcat.
867 std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
868#endif
869}
870
// Exercises the quick lock-object stub: verifies the thin-lock fast path
// (first lock, then recursive re-locking with an incrementing count) and the
// transition to a fat lock once the lock word already holds a hash code.
TEST_F(StubTest, LockObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Number of recursive lock acquisitions to drive through the stub.
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();

  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  // A freshly allocated object must start out unlocked.
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  // First acquisition: unlocked -> thin-locked via the stub.
  Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
  EXPECT_EQ(lock_after.ThinLockCount(), 0U);  // Thin lock starts count at zero

  // Each recursive acquisition should bump the thin-lock count by one while
  // the lock stays thin.
  for (size_t i = 1; i < kThinLockLoops; ++i) {
    Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

    // Check we're at lock count i

    LockWord l_inc = obj->GetLockWord(false);
    LockWord::LockState l_inc_state = l_inc.GetState();
    EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
    EXPECT_EQ(l_inc.ThinLockCount(), i);
  }

  // Force a fat lock by running identity hashcode to fill up lock word.
  Handle<mirror::String> obj2(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  // After this the lock word stores the hash code, so the next lock cannot
  // use the thin representation and must inflate to a monitor.
  obj2->IdentityHashCode();

  Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after2 = obj2->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
  EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));

  // Test done.
#else
  LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
929
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700930
// Tiny deterministic pseudo-random generator used to drive the lock stress
// test reproducibly: a multiplicative-congruential step (multiplier 48271,
// modulus 2^31 - 1, evaluated in uint32 arithmetic) plus a small additive
// constant. Not a quality RNG — repeatability is all that matters here.
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  // Advance the state and return the new value.
  uint32_t next() {
    static constexpr uint32_t kMultiplier = 48271u;
    static constexpr uint32_t kModulus = 2147483647u;
    static constexpr uint32_t kIncrement = 13u;
    val_ = (val_ * kMultiplier) % kModulus + kIncrement;
    return val_;
  }

  // Current state; public so the sequence can be inspected if needed.
  uint32_t val_;
};
942
943
// Exercises the quick unlock-object stub, paired with the lock stub:
//  1. Unlocking an unlocked object must raise an exception (illegal monitor
//     state) and leave the lock word unlocked.
//  2. A lock/unlock round trip must return the object to the unlocked state.
//  3. A randomized stress phase keeps kNumberOfLocks objects in flight,
//     randomly locking/unlocking them and occasionally inflating one to a
//     fat lock, cross-checking the lock word (and MonitorInfo for fat locks)
//     against a shadow count after every step.
// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Maximum recursive depth driven through the thin lock in the stress phase.
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();

  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
  const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init
  static constexpr size_t kNumberOfLocks = 10;  // Number of objects = lock
  StackHandleScope<kNumberOfLocks + 1> hs(self);
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  // Unlocking an object that was never locked must fail...
  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
  // This should be an illegal monitor state.
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // ...and must not have altered the lock word.
  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);

  // Lock once: unlocked -> thin-locked.
  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after2 = obj->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);

  // Unlock once: thin-locked -> unlocked again.
  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);

  LockWord lock_after3 = obj->GetLockWord(false);
  LockWord::LockState new_state3 = lock_after3.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);

  // Stress test:
  // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
  // each step.

  RandGen r(0x1234);

  constexpr size_t kIterations = 10000;  // Number of iterations
  constexpr size_t kMoveToFat = 1000;  // Chance of 1:kMoveFat to make a lock fat.

  size_t counts[kNumberOfLocks];  // Shadow recursion count per object.
  bool fat[kNumberOfLocks];  // Whether a lock should be thin or fat.
  Handle<mirror::String> objects[kNumberOfLocks];

  // Initialize = allocate.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    counts[i] = 0;
    fat[i] = false;
    objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
  }

  for (size_t i = 0; i < kIterations; ++i) {
    // Select which lock to update.
    size_t index = r.next() % kNumberOfLocks;

    // Make lock fat?
    if (!fat[index] && (r.next() % kMoveToFat == 0)) {
      fat[index] = true;
      // Installing the identity hash code forces subsequent locking to
      // inflate this object's lock.
      objects[index]->IdentityHashCode();

      LockWord lock_iter = objects[index]->GetLockWord(false);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (counts[index] == 0) {
        // Not currently held: the lock word now just carries the hash.
        EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
      } else {
        // Currently held: hashing a held thin lock inflates it.
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
      }
    } else {
      bool take_lock;  // Whether to lock or unlock in this step.
      if (counts[index] == 0) {
        take_lock = true;  // Cannot unlock below zero.
      } else if (counts[index] == kThinLockLoops) {
        take_lock = false;  // Cap the recursion depth.
      } else {
        // Randomly.
        take_lock = r.next() % 2 == 0;
      }

      if (take_lock) {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
                      self);
        counts[index]++;
      } else {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      art_quick_unlock_object, self);
        counts[index]--;
      }

      // Balanced lock/unlock traffic must never raise.
      EXPECT_FALSE(self->IsExceptionPending());

      // Check the new state.
      LockWord lock_iter = objects[index]->GetLockWord(true);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (fat[index]) {
        // Abuse MonitorInfo.
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
        MonitorInfo info(objects[index].Get());
        EXPECT_EQ(counts[index], info.entry_count_) << index;
      } else {
        if (counts[index] > 0) {
          EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
          // ThinLockCount is zero-based, the shadow count is one-based.
          EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
        } else {
          EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
        }
      }
    }
  }

  // Unlock the remaining count times and then check it's unlocked. Then deallocate.
  // Go reverse order to correctly handle Handles.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    size_t index = kNumberOfLocks - 1 - i;
    size_t count = counts[index];
    while (count > 0) {
      test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
                    self);
      count--;
    }

    // Fully released locks end up unlocked; inflated ones may legitimately
    // remain fat-locked (deflation is not forced here).
    LockWord lock_after4 = objects[index]->GetLockWord(false);
    LockWord::LockState new_state4 = lock_after4.GetState();
    EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
                || LockWord::LockState::kFatLocked == new_state4);
  }

  // Test done.
#else
  UNUSED(test);
  LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1089
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001090TEST_F(StubTest, UnlockObject) {
Andreas Gampe369810a2015-01-14 19:53:31 -08001091 // This will lead to monitor error messages in the log.
1092 ScopedLogSeverity sls(LogSeverity::FATAL);
1093
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001094 TestUnlockObject(this);
1095}
Andreas Gampe525cde22014-04-22 15:44:50 -07001096
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
// Forward declaration of the assembly check-cast stub.
// NOTE(review): the CheckCast test below resolves the stub via
// StubTest::GetEntrypoint(kQuickCheckCast) rather than through this symbol,
// so this declaration looks unused — confirm against the rest of the file
// before removing.
extern "C" void art_quick_check_cast(void);
#endif
1101
// Exercises the quick check-cast stub with two array classes:
// Object[] (c) and String[] (c2). Same-class checks and the
// (Object[], String[]) pairing succeed; (String[], Object[]) throws —
// i.e. the argument order that succeeds matches "String[] is assignable
// to Object[]" while the reverse is rejected.
TEST_F(StubTest, CheckCast) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  const uintptr_t art_quick_check_cast = StubTest::GetEntrypoint(self, kQuickCheckCast);

  // Find some classes.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
  Handle<mirror::Class> c2(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  EXPECT_FALSE(self->IsExceptionPending());

  // Object[] against Object[]: identical classes, must not throw.
  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  // String[] against String[]: identical classes, must not throw.
  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  // (Object[], String[]): compatible direction, must not throw.
  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  // TODO: Make the following work. But that would require correct managed frames.

  // (String[], Object[]): incompatible direction, must throw.
  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

#else
  LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1150
1151
// Exercises the checked aput-object stub (null and bounds checks included):
// stores assignable references and null into a String[] successfully, then
// verifies that out-of-bounds indices and an incompatible element type raise
// exceptions.
TEST_F(StubTest, APutObj) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  // Do not check non-checked ones, we'd need handlers and stuff...
  const uintptr_t art_quick_aput_obj_with_null_and_bound_check =
      StubTest::GetEntrypoint(self, kQuickAputObjectWithNullAndBoundCheck);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<5> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
  Handle<mirror::Class> ca(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  // Build a String[] of length 10 (index 10 is used later as the
  // out-of-bounds case).
  Handle<mirror::ObjectArray<mirror::Object>> array(
      hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));

  // Build a string -> should be assignable
  Handle<mirror::String> str_obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  // Build a generic object -> should fail assigning
  Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));

  // Play with it...

  // 1) Success cases
  // 1.1) Assign str_obj to array[0..3]

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(3));

  // 1.2) Assign null to array[0..3] — storing null is always legal.

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(3));

  // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.

  // 2) Failure cases (str into str[])
  // 2.1) Array = null
  // TODO: Throwing NPE needs actual DEX code

//  Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
//          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
//
//  EXPECT_TRUE(self->IsExceptionPending());
//  self->ClearException();

  // 2.2) Index < 0 — bounds check must throw.

  Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
          reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 2.3) Index > 0 — index 10 is one past the end of the length-10 array.

  Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 3) Failure cases (obj into str[]) — plain Object is not assignable to a
  // String[] element, so the type check must throw.

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // Tests done.
#else
  LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1283
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001284TEST_F(StubTest, AllocObject) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001285#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1286 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe369810a2015-01-14 19:53:31 -08001287 // This will lead to OOM error messages in the log.
1288 ScopedLogSeverity sls(LogSeverity::FATAL);
1289
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001290 // TODO: Check the "Unresolved" allocation stubs
1291
1292 Thread* self = Thread::Current();
1293 // Create an object
1294 ScopedObjectAccess soa(self);
1295 // garbage is created during ClassLinker::Init
1296
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001297 StackHandleScope<2> hs(soa.Self());
1298 Handle<mirror::Class> c(
1299 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001300
1301 // Play with it...
1302
1303 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001304 {
1305 // Use an arbitrary method from c to use as referrer
1306 size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()), // type_idx
Mathieu Chartiere401d142015-04-22 13:56:20 -07001307 // arbitrary
1308 reinterpret_cast<size_t>(c->GetVirtualMethod(0, sizeof(void*))),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001309 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001310 StubTest::GetEntrypoint(self, kQuickAllocObject),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001311 self);
1312
1313 EXPECT_FALSE(self->IsExceptionPending());
1314 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1315 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001316 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001317 VerifyObject(obj);
1318 }
1319
1320 {
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001321 // We can use null in the second argument as we do not need a method here (not used in
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001322 // resolved/initialized cases)
Mathieu Chartiere401d142015-04-22 13:56:20 -07001323 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001324 StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001325 self);
1326
1327 EXPECT_FALSE(self->IsExceptionPending());
1328 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1329 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001330 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001331 VerifyObject(obj);
1332 }
1333
1334 {
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001335 // We can use null in the second argument as we do not need a method here (not used in
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001336 // resolved/initialized cases)
Mathieu Chartiere401d142015-04-22 13:56:20 -07001337 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001338 StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001339 self);
1340
1341 EXPECT_FALSE(self->IsExceptionPending());
1342 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1343 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001344 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001345 VerifyObject(obj);
1346 }
1347
1348 // Failure tests.
1349
1350 // Out-of-memory.
1351 {
1352 Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);
1353
1354 // Array helps to fill memory faster.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001355 Handle<mirror::Class> ca(
1356 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
1357
1358 // Use arbitrary large amount for now.
1359 static const size_t kMaxHandles = 1000000;
Ian Rogers700a4022014-05-19 16:49:03 -07001360 std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001361
1362 std::vector<Handle<mirror::Object>> handles;
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001363 // Start allocating with 128K
1364 size_t length = 128 * KB / 4;
1365 while (length > 10) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001366 Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
1367 mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
1368 if (self->IsExceptionPending() || h.Get() == nullptr) {
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001369 self->ClearException();
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001370
1371 // Try a smaller length
1372 length = length / 8;
1373 // Use at most half the reported free space.
1374 size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
1375 if (length * 8 > mem) {
1376 length = mem / 8;
1377 }
1378 } else {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001379 handles.push_back(h);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001380 }
1381 }
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001382 LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001383
1384 // Allocate simple objects till it fails.
1385 while (!self->IsExceptionPending()) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001386 Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
1387 if (!self->IsExceptionPending() && h.Get() != nullptr) {
1388 handles.push_back(h);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001389 }
1390 }
1391 self->ClearException();
1392
Mathieu Chartiere401d142015-04-22 13:56:20 -07001393 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001394 StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001395 self);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001396 EXPECT_TRUE(self->IsExceptionPending());
1397 self->ClearException();
1398 EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001399 }
1400
1401 // Tests done.
1402#else
1403 LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
1404 // Force-print to std::cout so it's also outside the logcat.
1405 std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
1406#endif
1407}
1408
// Exercises the quick object-array allocation entrypoints (kQuickAllocArray /
// kQuickAllocArrayResolved): a successful resolved allocation and an allocation
// that must fail (length of GB elements), which should raise an exception.
TEST_F(StubTest, AllocObjectArray) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // c is the array class we allocate; c_obj only provides a linked method to use as referrer.
  StackHandleScope<2> hs(self);
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

  // Needed to have a linked method.
  Handle<mirror::Class> c_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());

  // For some reason this does not work, as the type_idx is artificial and outside what the
  // resolved types of c_obj allow...

  // Disabled block: the type_idx-based kQuickAllocArray path (see comment above).
  if ((false)) {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            10U,
                            // arbitrary
                            reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0, sizeof(void*))),
                            StubTest::GetEntrypoint(self, kQuickAllocArray),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    EXPECT_EQ(obj->GetLength(), 10);
  }

  // Success case: allocate a 10-element array through the resolved-class stub.
  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 10U,
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory.
  {
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()),
                            GB,  // that should fail...
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);

    // The stub must report failure via a pending exception and a null result.
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1494
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001495
// Exercises the quick kQuickStringCompareTo entrypoint against every ordered pair
// of a set of test strings, comparing only the SIGN of the stub's result against
// the reference String::CompareTo implementation.
TEST_F(StubTest, StringCompareTo) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();

  const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);

  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  const char* c[] = { "", "", "a", "aa", "ab",
      "aacaacaacaacaacaac",  // This one's under the default limit to go to __memcmp16.
      "aacaacaacaacaacaacaacaacaacaacaacaac",     // This one's over.
      "aacaacaacaacaacaacaacaacaacaacaacaaca" };  // As is this one. We need a separate one to
                                                  // defeat object-equal optimizations.
  static constexpr size_t kStringCount = arraysize(c);

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  // Materialize each C string as a managed java.lang.String.
  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
  }

  // TODO: wide characters

  // Matrix of expectations. First component is first parameter. Note we only check against the
  // sign, not the value. As we are testing random offsets, we need to compute this and need to
  // rely on String::CompareTo being correct.
  int32_t expected[kStringCount][kStringCount];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      expected[x][y] = s[x]->CompareTo(s[y].Get());
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      // Test string_compareto x y
      size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
                              reinterpret_cast<size_t>(s[y].Get()), 0U,
                              art_quick_string_compareto, self);

      EXPECT_FALSE(self->IsExceptionPending());

      // The result is a 32b signed integer
      // Reinterpret the register-width return value as the int32_t the stub produced.
      union {
        size_t r;
        int32_t i;
      } conv;
      conv.r = result;
      int32_t e = expected[x][y];
      // Only the sign (or zero-ness) of the result is contractual, so each case is
      // checked with a vacuous-true guard for the other signs.
      EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
      std::endl;
#endif
}
1574
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001575
Mathieu Chartierc7853442015-03-27 14:35:38 -07001576static void GetSetBooleanStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001577 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001578 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001579#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1580 (defined(__x86_64__) && !defined(__APPLE__))
Fred Shih37f05ef2014-07-16 18:38:08 -07001581 constexpr size_t num_values = 5;
1582 uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };
1583
1584 for (size_t i = 0; i < num_values; ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001585 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001586 static_cast<size_t>(values[i]),
1587 0U,
1588 StubTest::GetEntrypoint(self, kQuickSet8Static),
1589 self,
1590 referrer);
1591
Mathieu Chartierc7853442015-03-27 14:35:38 -07001592 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001593 0U, 0U,
1594 StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
1595 self,
1596 referrer);
1597 // Boolean currently stores bools as uint8_t, be more zealous about asserting correct writes/gets.
1598 EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
1599 }
1600#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001601 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001602 LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
1603 // Force-print to std::cout so it's also outside the logcat.
1604 std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1605#endif
1606}
Mathieu Chartiere401d142015-04-22 13:56:20 -07001607static void GetSetByteStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Mathieu Chartierc7853442015-03-27 14:35:38 -07001608 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001609 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001610#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1611 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001612 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001613
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001614 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001615 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001616 static_cast<size_t>(values[i]),
1617 0U,
1618 StubTest::GetEntrypoint(self, kQuickSet8Static),
1619 self,
1620 referrer);
1621
Mathieu Chartierc7853442015-03-27 14:35:38 -07001622 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001623 0U, 0U,
1624 StubTest::GetEntrypoint(self, kQuickGetByteStatic),
1625 self,
1626 referrer);
1627 EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
1628 }
1629#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001630 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001631 LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
1632 // Force-print to std::cout so it's also outside the logcat.
1633 std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1634#endif
1635}
1636
1637
// Round-trips byte patterns through the quick Set8Instance / GetBooleanInstance
// entrypoints for instance boolean field |f| of object |*obj|, cross-checking the
// stub writes against direct ArtField reads.
static void GetSetBooleanInstance(Handle<mirror::Object>* obj, ArtField* f, Thread* self,
                                  ArtMethod* referrer, StubTest* test)
    SHARED_REQUIRES(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Includes non-0/1 patterns to check how the byte is stored.
  uint8_t values[] = { 0, true, 2, 128, 0xFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write through the quick stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet8Instance),
                              self,
                              referrer);

    // Verify via a direct field read.
    uint8_t res = f->GetBoolean(obj->Get());
    EXPECT_EQ(values[i], res) << "Iteration " << i;

    // Write the observed value back directly, then check the quick getter sees it.
    f->SetBoolean<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint8_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
// Round-trips int8_t values through the quick Set8Instance / GetByteInstance
// entrypoints for instance byte field |f| of object |*obj|. After verifying the
// stub's write via ArtField, the value is bumped directly so the quick getter is
// proven to read the current field contents rather than a cached value.
static void GetSetByteInstance(Handle<mirror::Object>* obj, ArtField* f,
                               Thread* self, ArtMethod* referrer, StubTest* test)
    SHARED_REQUIRES(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  int8_t values[] = { -128, -64, 0, 64, 127 };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write through the quick stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet8Instance),
                              self,
                              referrer);

    // Verify via a direct field read.
    int8_t res = f->GetByte(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    // Change the field directly so the quick getter must observe a fresh value.
    f->SetByte<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetByteInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int8_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1707
Mathieu Chartiere401d142015-04-22 13:56:20 -07001708static void GetSetCharStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001709 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001710 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001711#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1712 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001713 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001714
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001715 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001716 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001717 static_cast<size_t>(values[i]),
1718 0U,
1719 StubTest::GetEntrypoint(self, kQuickSet16Static),
1720 self,
1721 referrer);
1722
Mathieu Chartierc7853442015-03-27 14:35:38 -07001723 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001724 0U, 0U,
1725 StubTest::GetEntrypoint(self, kQuickGetCharStatic),
1726 self,
1727 referrer);
1728
1729 EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
1730 }
1731#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001732 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001733 LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
1734 // Force-print to std::cout so it's also outside the logcat.
1735 std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1736#endif
1737}
// Round-trips int16_t values (including both extremes) through the quick
// Set16Static / GetShortStatic entrypoints for static short field |f| and checks
// that the value read back equals the value written.
static void GetSetShortStatic(ArtField* f, Thread* self,
                              ArtMethod* referrer, StubTest* test)
    SHARED_REQUIRES(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write through the quick stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet16Static),
                              self,
                              referrer);

    // Read back through the quick stub.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetShortStatic),
                                           self,
                                           referrer);

    EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1768
// Round-trips uint16_t values through the quick Set16Instance / GetCharInstance
// entrypoints for instance char field |f| of object |*obj|. After verifying the
// stub's write via ArtField, the value is bumped directly so the quick getter is
// proven to read the current field contents.
static void GetSetCharInstance(Handle<mirror::Object>* obj, ArtField* f,
                               Thread* self, ArtMethod* referrer, StubTest* test)
    SHARED_REQUIRES(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write through the quick stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    // Verify via a direct field read.
    uint16_t res = f->GetChar(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    // Change the field directly so the quick getter must observe a fresh value.
    f->SetChar<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetCharInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint16_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
// Round-trips int16_t values through the quick Set16Instance / GetShortInstance
// entrypoints for instance short field |f| of object |*obj|. After verifying the
// stub's write via ArtField, the value is bumped directly so the quick getter is
// proven to read the current field contents.
static void GetSetShortInstance(Handle<mirror::Object>* obj, ArtField* f,
                                Thread* self, ArtMethod* referrer, StubTest* test)
    SHARED_REQUIRES(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write through the quick stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    // Verify via a direct field read.
    int16_t res = f->GetShort(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    // Change the field directly so the quick getter must observe a fresh value.
    f->SetShort<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetShortInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int16_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1837
// Round-trips uint32_t values through the quick Set32Static / Get32Static
// entrypoints for static int field |f| and checks the value read back.
static void GetSet32Static(ArtField* f, Thread* self, ArtMethod* referrer,
                           StubTest* test)
    SHARED_REQUIRES(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write through the quick stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet32Static),
                              self,
                              referrer);

    // Read back through the quick stub.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet32Static),
                                           self,
                                           referrer);

// On MIPS64 only the low 32 bits of the returned register are significant,
// so truncate before comparing.
#if defined(__mips__) && defined(__LP64__)
    EXPECT_EQ(static_cast<uint32_t>(res), values[i]) << "Iteration " << i;
#else
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
#endif
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1872
1873
Mathieu Chartierc7853442015-03-27 14:35:38 -07001874static void GetSet32Instance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001875 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001876 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001877#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1878 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001879 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001880
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001881 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001882 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001883 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001884 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001885 StubTest::GetEntrypoint(self, kQuickSet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001886 self,
1887 referrer);
1888
Mathieu Chartierc7853442015-03-27 14:35:38 -07001889 int32_t res = f->GetInt(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001890 EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;
1891
1892 res++;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001893 f->SetInt<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001894
Mathieu Chartierc7853442015-03-27 14:35:38 -07001895 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001896 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001897 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001898 StubTest::GetEntrypoint(self, kQuickGet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001899 self,
1900 referrer);
1901 EXPECT_EQ(res, static_cast<int32_t>(res2));
1902 }
1903#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001904 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001905 LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
1906 // Force-print to std::cout so it's also outside the logcat.
1907 std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1908#endif
1909}
1910
1911
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001912#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1913 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001914
1915static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001916 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001917 SHARED_REQUIRES(Locks::mutator_lock_) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001918 test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1919 reinterpret_cast<size_t>(val),
1920 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001921 StubTest::GetEntrypoint(self, kQuickSetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001922 self,
1923 referrer);
1924
1925 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1926 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001927 StubTest::GetEntrypoint(self, kQuickGetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001928 self,
1929 referrer);
1930
1931 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1932}
1933#endif
1934
Mathieu Chartiere401d142015-04-22 13:56:20 -07001935static void GetSetObjStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001936 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001937 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001938#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1939 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierc7853442015-03-27 14:35:38 -07001940 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001941
1942 // Allocate a string object for simplicity.
1943 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartierc7853442015-03-27 14:35:38 -07001944 set_and_check_static(f->GetDexFieldIndex(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001945
Mathieu Chartierc7853442015-03-27 14:35:38 -07001946 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001947#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001948 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001949 LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
1950 // Force-print to std::cout so it's also outside the logcat.
1951 std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
1952#endif
1953}
1954
1955
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001956#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1957 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierc7853442015-03-27 14:35:38 -07001958static void set_and_check_instance(ArtField* f, mirror::Object* trg,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001959 mirror::Object* val, Thread* self, ArtMethod* referrer,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001960 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001961 SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001962 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001963 reinterpret_cast<size_t>(trg),
1964 reinterpret_cast<size_t>(val),
Andreas Gampe29b38412014-08-13 00:15:43 -07001965 StubTest::GetEntrypoint(self, kQuickSetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001966 self,
1967 referrer);
1968
Mathieu Chartierc7853442015-03-27 14:35:38 -07001969 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001970 reinterpret_cast<size_t>(trg),
1971 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001972 StubTest::GetEntrypoint(self, kQuickGetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001973 self,
1974 referrer);
1975
1976 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1977
Mathieu Chartierc7853442015-03-27 14:35:38 -07001978 EXPECT_EQ(val, f->GetObj(trg));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001979}
1980#endif
1981
Mathieu Chartierc7853442015-03-27 14:35:38 -07001982static void GetSetObjInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001983 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001984 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001985#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1986 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001987 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001988
1989 // Allocate a string object for simplicity.
1990 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001991 set_and_check_instance(f, obj->Get(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001992
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001993 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001994#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001995 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001996 LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
1997 // Force-print to std::cout so it's also outside the logcat.
1998 std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
1999#endif
2000}
2001
2002
// TODO: Complete the 64-bit static field tests below for 32-bit architectures
// (they currently only run on 64-bit targets).
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002004
Mathieu Chartiere401d142015-04-22 13:56:20 -07002005static void GetSet64Static(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002006 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07002007 SHARED_REQUIRES(Locks::mutator_lock_) {
Calin Juravle872ab3f2015-10-02 07:27:51 +01002008// TODO: (defined(__mips__) && defined(__LP64__)) || defined(__aarch64__)
2009#if (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002010 uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002011
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002012 for (size_t i = 0; i < arraysize(values); ++i) {
Calin Juravle872ab3f2015-10-02 07:27:51 +01002013 test->Invoke64StaticSet(static_cast<size_t>(f->GetDexFieldIndex()),
2014 values[i],
2015 0U,
2016 StubTest::GetEntrypoint(self, kQuickSet64Static),
2017 self,
2018 referrer);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002019
Mathieu Chartierc7853442015-03-27 14:35:38 -07002020 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002021 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07002022 StubTest::GetEntrypoint(self, kQuickGet64Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002023 self,
2024 referrer);
2025
2026 EXPECT_EQ(res, values[i]) << "Iteration " << i;
2027 }
2028#else
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002029 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002030 LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
2031 // Force-print to std::cout so it's also outside the logcat.
2032 std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
2033#endif
2034}
2035
2036
Mathieu Chartierc7853442015-03-27 14:35:38 -07002037static void GetSet64Instance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07002038 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07002039 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02002040#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) || \
2041 defined(__aarch64__)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002042 uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002043
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002044 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07002045 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07002046 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002047 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07002048 StubTest::GetEntrypoint(self, kQuickSet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002049 self,
2050 referrer);
2051
Mathieu Chartierc7853442015-03-27 14:35:38 -07002052 int64_t res = f->GetLong(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002053 EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;
2054
2055 res++;
Mathieu Chartierc7853442015-03-27 14:35:38 -07002056 f->SetLong<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002057
Mathieu Chartierc7853442015-03-27 14:35:38 -07002058 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07002059 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002060 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07002061 StubTest::GetEntrypoint(self, kQuickGet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002062 self,
2063 referrer);
2064 EXPECT_EQ(res, static_cast<int64_t>(res2));
2065 }
2066#else
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002067 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002068 LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
2069 // Force-print to std::cout so it's also outside the logcat.
2070 std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
2071#endif
2072}
2073
// Driver for the field-stub tests: allocates an instance of the "AllFields"
// test class and, for every static and instance field whose primitive type
// matches |test_type|, dispatches to the matching GetSet* helper above.
// Array-typed reference fields are deliberately skipped.
static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
  // garbage is created during ClassLinker::Init

  JNIEnv* env = Thread::Current()->GetJniEnv();
  jclass jc = env->FindClass("AllFields");
  CHECK(jc != nullptr);
  jobject o = env->AllocObject(jc);
  CHECK(o != nullptr);

  ScopedObjectAccess soa(self);
  StackHandleScope<3> hs(self);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
  Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
  // Need a method as a referrer
  // NOTE(review): uses the class's first direct method as the referrer;
  // assumes AllFields has at least one direct method (e.g. its constructor).
  ArtMethod* m = c->GetDirectMethod(0, sizeof(void*));

  // Play with it...

  // Static fields.
  for (ArtField& f : c->GetSFields()) {
    Primitive::Type type = f.GetTypeAsPrimitiveType();
    if (test_type != type) {
      continue;
    }
    switch (type) {
      case Primitive::Type::kPrimBoolean:
        GetSetBooleanStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimByte:
        GetSetByteStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimChar:
        GetSetCharStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimShort:
        GetSetShortStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimInt:
        GetSet32Static(&f, self, m, test);
        break;
      case Primitive::Type::kPrimLong:
        GetSet64Static(&f, self, m, test);
        break;
      case Primitive::Type::kPrimNot:
        // Don't try array.
        if (f.GetTypeDescriptor()[0] != '[') {
          GetSetObjStatic(&f, self, m, test);
        }
        break;
      default:
        break;  // Skip.
    }
  }

  // Instance fields.
  for (ArtField& f : c->GetIFields()) {
    Primitive::Type type = f.GetTypeAsPrimitiveType();
    if (test_type != type) {
      continue;
    }
    switch (type) {
      case Primitive::Type::kPrimBoolean:
        GetSetBooleanInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimByte:
        GetSetByteInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimChar:
        GetSetCharInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimShort:
        GetSetShortInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimInt:
        GetSet32Instance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimLong:
        GetSet64Instance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimNot:
        // Don't try array.
        if (f.GetTypeDescriptor()[0] != '[') {
          GetSetObjInstance(&obj, &f, self, m, test);
        }
        break;
      default:
        break;  // Skip.
    }
  }

  // TODO: Deallocate things.
}
2166
Fred Shih37f05ef2014-07-16 18:38:08 -07002167TEST_F(StubTest, Fields8) {
Fred Shih37f05ef2014-07-16 18:38:08 -07002168 Thread* self = Thread::Current();
2169
2170 self->TransitionFromSuspendedToRunnable();
2171 LoadDex("AllFields");
2172 bool started = runtime_->Start();
2173 CHECK(started);
2174
2175 TestFields(self, this, Primitive::Type::kPrimBoolean);
2176 TestFields(self, this, Primitive::Type::kPrimByte);
2177}
2178
2179TEST_F(StubTest, Fields16) {
Fred Shih37f05ef2014-07-16 18:38:08 -07002180 Thread* self = Thread::Current();
2181
2182 self->TransitionFromSuspendedToRunnable();
2183 LoadDex("AllFields");
2184 bool started = runtime_->Start();
2185 CHECK(started);
2186
2187 TestFields(self, this, Primitive::Type::kPrimChar);
2188 TestFields(self, this, Primitive::Type::kPrimShort);
2189}
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002190
2191TEST_F(StubTest, Fields32) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002192 Thread* self = Thread::Current();
2193
2194 self->TransitionFromSuspendedToRunnable();
2195 LoadDex("AllFields");
2196 bool started = runtime_->Start();
2197 CHECK(started);
2198
2199 TestFields(self, this, Primitive::Type::kPrimInt);
2200}
2201
2202TEST_F(StubTest, FieldsObj) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002203 Thread* self = Thread::Current();
2204
2205 self->TransitionFromSuspendedToRunnable();
2206 LoadDex("AllFields");
2207 bool started = runtime_->Start();
2208 CHECK(started);
2209
2210 TestFields(self, this, Primitive::Type::kPrimNot);
2211}
2212
2213TEST_F(StubTest, Fields64) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002214 Thread* self = Thread::Current();
2215
2216 self->TransitionFromSuspendedToRunnable();
2217 LoadDex("AllFields");
2218 bool started = runtime_->Start();
2219 CHECK(started);
2220
2221 TestFields(self, this, Primitive::Type::kPrimLong);
2222}
2223
// Tests interface-method dispatch through the quick stubs:
// 1. the IMT conflict trampoline, invoked with a hidden argument carrying the
//    interface method's dex index, and
// 2. the regular interface trampoline with access check.
// Uses ArrayList.contains / List.contains so the result (JNI_TRUE/JNI_FALSE)
// can be flipped by adding an element between calls.
TEST_F(StubTest, IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod = env->GetMethodID(
      arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get representation.
  ArtMethod* contains_amethod = soa.DecodeMethod(contains_jmethod);

  // Patch up ArrayList.contains.
  // If the method has no compiled code yet, route it through the
  // quick-to-interpreter bridge so the trampolines have a target to call.
  if (contains_amethod->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
  }

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod = env->GetMethodID(
      list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get mirror representation.
  ArtMethod* inf_contains = soa.DecodeMethod(inf_contains_jmethod);

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Create instances.

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object*>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(jobj)));

  // Invocation tests.

  // 1. imt_conflict

  // Contains.
  // The list is empty, so contains(obj) must report false.

  size_t result =
      Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self, contains_amethod,
                                   static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());

  // Contains.
  // After the add, the same call must report true.

  result = Invoke3WithReferrerAndHidden(
      0U, reinterpret_cast<size_t>(array_list.Get()), reinterpret_cast<size_t>(obj.Get()),
      StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline), self, contains_amethod,
      static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // 2. regular interface trampoline

  // contains(obj) -> true (obj was added above).
  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(obj.Get()),
                               StubTest::GetEntrypoint(self,
                                   kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // contains(array_list) -> false (the list does not contain itself).
  result = Invoke3WithReferrer(
      static_cast<size_t>(inf_contains->GetDexMethodIndex()),
      reinterpret_cast<size_t>(array_list.Get()), reinterpret_cast<size_t>(array_list.Get()),
      StubTest::GetEntrypoint(self, kQuickInvokeInterfaceTrampolineWithAccessCheck), self,
      contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
2343
// Tests the quick IndexOf stub against String::FastIndexOf for every
// combination of test string, search character, and start offset (including
// a start of -1 and starts past the end of the string). The expectation
// matrix is precomputed with FastIndexOf, so this checks stub/runtime
// agreement rather than absolute values.
TEST_F(StubTest, StringIndexOf) {
#if defined(__arm__) || defined(__aarch64__)
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  const char* c_str[] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
  static constexpr size_t kStringCount = arraysize(c_str);
  const char c_char[] = { 'a', 'b', 'c', 'd', 'e' };
  static constexpr size_t kCharCount = arraysize(c_char);

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
  }

  // Matrix of expectations. First component is first parameter. Note we only check against the
  // sign, not the value. As we are testing random offsets, we need to compute this and need to
  // rely on String::CompareTo being correct.
  static constexpr size_t kMaxLen = 9;
  DCHECK_LE(strlen(c_str[kStringCount-1]), kMaxLen) << "Please fix the indexof test.";

  // Last dimension: start, offset by 1.
  // Index z maps to start = z - 1, so z spans starts -1 .. kMaxLen + 1.
  int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
      }
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        int32_t start = static_cast<int32_t>(z) - 1;

        // Test string_compareto x y
        size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
                                StubTest::GetEntrypoint(self, kQuickIndexOf), self);

        EXPECT_FALSE(self->IsExceptionPending());

        // The result is a 32b signed integer
        // Reinterpret the low 32 bits of the size_t return as int32_t.
        union {
          size_t r;
          int32_t i;
        } conv;
        conv.r = result;

        EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
            c_char[y] << " @ " << start;
      }
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
2417
// Tests the quick ReadBarrierSlow stub: reads the class field of a freshly
// allocated java.lang.Object through the slow-path read barrier and checks
// that it returns the same mirror::Class* as a direct GetClass(). Only built
// when the runtime is configured with ART_USE_READ_BARRIER.
TEST_F(StubTest, ReadBarrier) {
#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
    defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
  Thread* self = Thread::Current();

  const uintptr_t readBarrierSlow = StubTest::GetEntrypoint(self, kQuickReadBarrierSlow);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Build an object instance
  Handle<mirror::Object> obj(hs.NewHandle(c->AllocObject(soa.Self())));

  EXPECT_FALSE(self->IsExceptionPending());

  // Invoke the stub on (obj, offset-of-class-field); the first argument is unused.
  size_t result = Invoke3(0U, reinterpret_cast<size_t>(obj.Get()),
                          mirror::Object::ClassOffset().SizeValue(), readBarrierSlow, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
  mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
  EXPECT_EQ(klass, obj->GetClass());

  // Tests done.
#else
  LOG(INFO) << "Skipping read_barrier_slow";
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping read_barrier_slow" << std::endl;
#endif
}
2453
Andreas Gampe525cde22014-04-22 15:44:50 -07002454} // namespace art