blob: 23b7cfa09c1c312074532079a3472a45de493bb1 [file] [log] [blame]
Andreas Gampe525cde22014-04-22 15:44:50 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Ian Rogerse63db272014-07-15 15:36:11 -070017#include <cstdio>
18
Mathieu Chartierc7853442015-03-27 14:35:38 -070019#include "art_field-inl.h"
Vladimir Marko3481ba22015-04-13 12:22:36 +010020#include "class_linker-inl.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070021#include "common_runtime_test.h"
Andreas Gampe29b38412014-08-13 00:15:43 -070022#include "entrypoints/quick/quick_entrypoints_enum.h"
Andreas Gampe51f76352014-05-21 08:28:48 -070023#include "mirror/art_method-inl.h"
24#include "mirror/class-inl.h"
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -070025#include "mirror/string-inl.h"
Ian Rogerse63db272014-07-15 15:36:11 -070026#include "scoped_thread_state_change.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070027
28namespace art {
29
30
31class StubTest : public CommonRuntimeTest {
32 protected:
33 // We need callee-save methods set up in the Runtime for exceptions.
34 void SetUp() OVERRIDE {
35 // Do the normal setup.
36 CommonRuntimeTest::SetUp();
37
38 {
39 // Create callee-save methods
40 ScopedObjectAccess soa(Thread::Current());
Vladimir Marko7624d252014-05-02 14:40:15 +010041 runtime_->SetInstructionSet(kRuntimeISA);
Andreas Gampe525cde22014-04-22 15:44:50 -070042 for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
43 Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
44 if (!runtime_->HasCalleeSaveMethod(type)) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -070045 runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(), type);
Andreas Gampe525cde22014-04-22 15:44:50 -070046 }
47 }
48 }
49 }
50
Ian Rogerse63db272014-07-15 15:36:11 -070051 void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
Andreas Gampe00c1e6d2014-04-25 15:47:13 -070052 // Use a smaller heap
53 for (std::pair<std::string, const void*>& pair : *options) {
54 if (pair.first.find("-Xmx") == 0) {
55 pair.first = "-Xmx4M"; // Smallest we can go.
56 }
57 }
Andreas Gampe51f76352014-05-21 08:28:48 -070058 options->push_back(std::make_pair("-Xint", nullptr));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -070059 }
Andreas Gampe525cde22014-04-22 15:44:50 -070060
Mathieu Chartier119c6bd2014-05-09 14:11:47 -070061 // Helper function needed since TEST_F makes a new class.
62 Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
63 return &self->tlsPtr_;
64 }
65
Andreas Gampe4fc046e2014-05-06 16:56:39 -070066 public:
Andreas Gampe525cde22014-04-22 15:44:50 -070067 size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
Andreas Gampe6cf80102014-05-19 11:32:41 -070068 return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
Andreas Gampe525cde22014-04-22 15:44:50 -070069 }
Andreas Gampe6e4e59c2014-05-05 20:11:02 -070070
Andreas Gampe6e4e59c2014-05-05 20:11:02 -070071 // TODO: Set up a frame according to referrer's specs.
72 size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
73 mirror::ArtMethod* referrer) {
74 // Push a transition back into managed code onto the linked list in thread.
75 ManagedStack fragment;
76 self->PushManagedStackFragment(&fragment);
77
78 size_t result;
Andreas Gampe6cf80102014-05-19 11:32:41 -070079 size_t fpr_result = 0;
Andreas Gampe6e4e59c2014-05-05 20:11:02 -070080#if defined(__i386__)
81 // TODO: Set the thread?
82 __asm__ __volatile__(
Ian Rogersc5f17732014-06-05 20:48:42 -070083 "subl $12, %%esp\n\t" // Align stack.
84 "pushl %[referrer]\n\t" // Store referrer.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -070085 "call *%%edi\n\t" // Call the stub
Ian Rogersc5f17732014-06-05 20:48:42 -070086 "addl $16, %%esp" // Pop referrer
Andreas Gampe6e4e59c2014-05-05 20:11:02 -070087 : "=a" (result)
88 // Use the result from eax
Andreas Gampe2f6e3512014-06-07 01:32:33 -070089 : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer)
90 // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
91 : "memory"); // clobber.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -070092 // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
93 // but compilation fails when declaring that.
94#elif defined(__arm__)
95 __asm__ __volatile__(
96 "push {r1-r12, lr}\n\t" // Save state, 13*4B = 52B
97 ".cfi_adjust_cfa_offset 52\n\t"
98 "push {r9}\n\t"
99 ".cfi_adjust_cfa_offset 4\n\t"
100 "mov r9, %[referrer]\n\n"
101 "str r9, [sp, #-8]!\n\t" // Push referrer, +8B padding so 16B aligned
102 ".cfi_adjust_cfa_offset 8\n\t"
103 "ldr r9, [sp, #8]\n\t"
104
105 // Push everything on the stack, so we don't rely on the order. What a mess. :-(
106 "sub sp, sp, #20\n\t"
107 "str %[arg0], [sp]\n\t"
108 "str %[arg1], [sp, #4]\n\t"
109 "str %[arg2], [sp, #8]\n\t"
110 "str %[code], [sp, #12]\n\t"
111 "str %[self], [sp, #16]\n\t"
112 "ldr r0, [sp]\n\t"
113 "ldr r1, [sp, #4]\n\t"
114 "ldr r2, [sp, #8]\n\t"
115 "ldr r3, [sp, #12]\n\t"
116 "ldr r9, [sp, #16]\n\t"
117 "add sp, sp, #20\n\t"
118
119 "blx r3\n\t" // Call the stub
Mathieu Chartier2cebb242015-04-21 16:50:40 -0700120 "add sp, sp, #12\n\t" // Pop null and padding
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700121 ".cfi_adjust_cfa_offset -12\n\t"
122 "pop {r1-r12, lr}\n\t" // Restore state
123 ".cfi_adjust_cfa_offset -52\n\t"
124 "mov %[result], r0\n\t" // Save the result
125 : [result] "=r" (result)
126 // Use the result from r0
127 : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
128 [referrer] "r"(referrer)
Andreas Gampe2f6e3512014-06-07 01:32:33 -0700129 : "memory"); // clobber.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700130#elif defined(__aarch64__)
131 __asm__ __volatile__(
Andreas Gampef39b3782014-06-03 14:38:30 -0700132 // Spill x0-x7 which we say we don't clobber. May contain args.
Andreas Gampe6cf80102014-05-19 11:32:41 -0700133 "sub sp, sp, #64\n\t"
134 ".cfi_adjust_cfa_offset 64\n\t"
Andreas Gampef39b3782014-06-03 14:38:30 -0700135 "stp x0, x1, [sp]\n\t"
136 "stp x2, x3, [sp, #16]\n\t"
137 "stp x4, x5, [sp, #32]\n\t"
138 "stp x6, x7, [sp, #48]\n\t"
Andreas Gampe6cf80102014-05-19 11:32:41 -0700139
Andreas Gampef39b3782014-06-03 14:38:30 -0700140 "sub sp, sp, #16\n\t" // Reserve stack space, 16B aligned
141 ".cfi_adjust_cfa_offset 16\n\t"
142 "str %[referrer], [sp]\n\t" // referrer
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700143
144 // Push everything on the stack, so we don't rely on the order. What a mess. :-(
145 "sub sp, sp, #48\n\t"
Andreas Gampe6cf80102014-05-19 11:32:41 -0700146 ".cfi_adjust_cfa_offset 48\n\t"
Andreas Gampef39b3782014-06-03 14:38:30 -0700147 // All things are "r" constraints, so direct str/stp should work.
148 "stp %[arg0], %[arg1], [sp]\n\t"
149 "stp %[arg2], %[code], [sp, #16]\n\t"
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700150 "str %[self], [sp, #32]\n\t"
Andreas Gampe6cf80102014-05-19 11:32:41 -0700151
152 // Now we definitely have x0-x3 free, use it to garble d8 - d15
153 "movk x0, #0xfad0\n\t"
154 "movk x0, #0xebad, lsl #16\n\t"
155 "movk x0, #0xfad0, lsl #32\n\t"
156 "movk x0, #0xebad, lsl #48\n\t"
157 "fmov d8, x0\n\t"
158 "add x0, x0, 1\n\t"
159 "fmov d9, x0\n\t"
160 "add x0, x0, 1\n\t"
161 "fmov d10, x0\n\t"
162 "add x0, x0, 1\n\t"
163 "fmov d11, x0\n\t"
164 "add x0, x0, 1\n\t"
165 "fmov d12, x0\n\t"
166 "add x0, x0, 1\n\t"
167 "fmov d13, x0\n\t"
168 "add x0, x0, 1\n\t"
169 "fmov d14, x0\n\t"
170 "add x0, x0, 1\n\t"
171 "fmov d15, x0\n\t"
172
Andreas Gampef39b3782014-06-03 14:38:30 -0700173 // Load call params into the right registers.
174 "ldp x0, x1, [sp]\n\t"
175 "ldp x2, x3, [sp, #16]\n\t"
Serban Constantinescu9bd88b02015-04-22 16:24:46 +0100176 "ldr x19, [sp, #32]\n\t"
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700177 "add sp, sp, #48\n\t"
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700178 ".cfi_adjust_cfa_offset -48\n\t"
179
Andreas Gampe6cf80102014-05-19 11:32:41 -0700180
181 "blr x3\n\t" // Call the stub
Andreas Gampef39b3782014-06-03 14:38:30 -0700182 "mov x8, x0\n\t" // Store result
183 "add sp, sp, #16\n\t" // Drop the quick "frame"
184 ".cfi_adjust_cfa_offset -16\n\t"
Andreas Gampe6cf80102014-05-19 11:32:41 -0700185
186 // Test d8 - d15. We can use x1 and x2.
187 "movk x1, #0xfad0\n\t"
188 "movk x1, #0xebad, lsl #16\n\t"
189 "movk x1, #0xfad0, lsl #32\n\t"
190 "movk x1, #0xebad, lsl #48\n\t"
191 "fmov x2, d8\n\t"
192 "cmp x1, x2\n\t"
193 "b.ne 1f\n\t"
194 "add x1, x1, 1\n\t"
195
196 "fmov x2, d9\n\t"
197 "cmp x1, x2\n\t"
198 "b.ne 1f\n\t"
199 "add x1, x1, 1\n\t"
200
201 "fmov x2, d10\n\t"
202 "cmp x1, x2\n\t"
203 "b.ne 1f\n\t"
204 "add x1, x1, 1\n\t"
205
206 "fmov x2, d11\n\t"
207 "cmp x1, x2\n\t"
208 "b.ne 1f\n\t"
209 "add x1, x1, 1\n\t"
210
211 "fmov x2, d12\n\t"
212 "cmp x1, x2\n\t"
213 "b.ne 1f\n\t"
214 "add x1, x1, 1\n\t"
215
216 "fmov x2, d13\n\t"
217 "cmp x1, x2\n\t"
218 "b.ne 1f\n\t"
219 "add x1, x1, 1\n\t"
220
221 "fmov x2, d14\n\t"
222 "cmp x1, x2\n\t"
223 "b.ne 1f\n\t"
224 "add x1, x1, 1\n\t"
225
226 "fmov x2, d15\n\t"
227 "cmp x1, x2\n\t"
228 "b.ne 1f\n\t"
229
Andreas Gampef39b3782014-06-03 14:38:30 -0700230 "mov x9, #0\n\t" // Use x9 as flag, in clobber list
Andreas Gampe6cf80102014-05-19 11:32:41 -0700231
232 // Finish up.
233 "2:\n\t"
Andreas Gampef39b3782014-06-03 14:38:30 -0700234 "ldp x0, x1, [sp]\n\t" // Restore stuff not named clobbered, may contain fpr_result
235 "ldp x2, x3, [sp, #16]\n\t"
236 "ldp x4, x5, [sp, #32]\n\t"
237 "ldp x6, x7, [sp, #48]\n\t"
238 "add sp, sp, #64\n\t" // Free stack space, now sp as on entry
Andreas Gampe6cf80102014-05-19 11:32:41 -0700239 ".cfi_adjust_cfa_offset -64\n\t"
240
Andreas Gampef39b3782014-06-03 14:38:30 -0700241 "str x9, %[fpr_result]\n\t" // Store the FPR comparison result
242 "mov %[result], x8\n\t" // Store the call result
243
Andreas Gampe6cf80102014-05-19 11:32:41 -0700244 "b 3f\n\t" // Goto end
245
246 // Failed fpr verification.
247 "1:\n\t"
Andreas Gampef39b3782014-06-03 14:38:30 -0700248 "mov x9, #1\n\t"
Andreas Gampe6cf80102014-05-19 11:32:41 -0700249 "b 2b\n\t" // Goto finish-up
250
251 // End
252 "3:\n\t"
Andreas Gampecf4035a2014-05-28 22:43:01 -0700253 : [result] "=r" (result)
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700254 // Use the result from r0
255 : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
Andreas Gampecf4035a2014-05-28 22:43:01 -0700256 [referrer] "r"(referrer), [fpr_result] "m" (fpr_result)
Andreas Gampef39b3782014-06-03 14:38:30 -0700257 : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
258 "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
259 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
260 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
261 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
Andreas Gampe2f6e3512014-06-07 01:32:33 -0700262 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
263 "memory"); // clobber.
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200264#elif defined(__mips__) && !defined(__LP64__)
265 __asm__ __volatile__ (
266 // Spill a0-a3 and t0-t7 which we say we don't clobber. May contain args.
267 "addiu $sp, $sp, -64\n\t"
268 "sw $a0, 0($sp)\n\t"
269 "sw $a1, 4($sp)\n\t"
270 "sw $a2, 8($sp)\n\t"
271 "sw $a3, 12($sp)\n\t"
272 "sw $t0, 16($sp)\n\t"
273 "sw $t1, 20($sp)\n\t"
274 "sw $t2, 24($sp)\n\t"
275 "sw $t3, 28($sp)\n\t"
276 "sw $t4, 32($sp)\n\t"
277 "sw $t5, 36($sp)\n\t"
278 "sw $t6, 40($sp)\n\t"
279 "sw $t7, 44($sp)\n\t"
280 // Spill gp register since it is caller save.
281 "sw $gp, 52($sp)\n\t"
282
283 "addiu $sp, $sp, -16\n\t" // Reserve stack space, 16B aligned.
284 "sw %[referrer], 0($sp)\n\t"
285
286 // Push everything on the stack, so we don't rely on the order.
287 "addiu $sp, $sp, -20\n\t"
288 "sw %[arg0], 0($sp)\n\t"
289 "sw %[arg1], 4($sp)\n\t"
290 "sw %[arg2], 8($sp)\n\t"
291 "sw %[code], 12($sp)\n\t"
292 "sw %[self], 16($sp)\n\t"
293
294 // Load call params into the right registers.
295 "lw $a0, 0($sp)\n\t"
296 "lw $a1, 4($sp)\n\t"
297 "lw $a2, 8($sp)\n\t"
298 "lw $t9, 12($sp)\n\t"
299 "lw $s1, 16($sp)\n\t"
300 "addiu $sp, $sp, 20\n\t"
301
302 "jalr $t9\n\t" // Call the stub.
303 "nop\n\t"
304 "addiu $sp, $sp, 16\n\t" // Drop the quick "frame".
305
306 // Restore stuff not named clobbered.
307 "lw $a0, 0($sp)\n\t"
308 "lw $a1, 4($sp)\n\t"
309 "lw $a2, 8($sp)\n\t"
310 "lw $a3, 12($sp)\n\t"
311 "lw $t0, 16($sp)\n\t"
312 "lw $t1, 20($sp)\n\t"
313 "lw $t2, 24($sp)\n\t"
314 "lw $t3, 28($sp)\n\t"
315 "lw $t4, 32($sp)\n\t"
316 "lw $t5, 36($sp)\n\t"
317 "lw $t6, 40($sp)\n\t"
318 "lw $t7, 44($sp)\n\t"
319 // Restore gp.
320 "lw $gp, 52($sp)\n\t"
321 "addiu $sp, $sp, 64\n\t" // Free stack space, now sp as on entry.
322
323 "move %[result], $v0\n\t" // Store the call result.
324 : [result] "=r" (result)
325 : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
326 [referrer] "r"(referrer)
327 : "at", "v0", "v1", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "t8", "t9", "k0", "k1",
328 "fp", "ra",
329 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7", "f8", "f9", "f10", "f11", "f12", "f13",
330 "f14", "f15", "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23", "f24", "f25", "f26",
331 "f27", "f28", "f29", "f30", "f31",
332 "memory"); // clobber.
333#elif defined(__mips__) && defined(__LP64__)
334 __asm__ __volatile__ (
335 // Spill a0-a7 which we say we don't clobber. May contain args.
336 "daddiu $sp, $sp, -64\n\t"
337 "sd $a0, 0($sp)\n\t"
338 "sd $a1, 8($sp)\n\t"
339 "sd $a2, 16($sp)\n\t"
340 "sd $a3, 24($sp)\n\t"
341 "sd $a4, 32($sp)\n\t"
342 "sd $a5, 40($sp)\n\t"
343 "sd $a6, 48($sp)\n\t"
344 "sd $a7, 56($sp)\n\t"
345
346 "daddiu $sp, $sp, -16\n\t" // Reserve stack space, 16B aligned.
347 "sd %[referrer], 0($sp)\n\t"
348
349 // Push everything on the stack, so we don't rely on the order.
350 "daddiu $sp, $sp, -40\n\t"
351 "sd %[arg0], 0($sp)\n\t"
352 "sd %[arg1], 8($sp)\n\t"
353 "sd %[arg2], 16($sp)\n\t"
354 "sd %[code], 24($sp)\n\t"
355 "sd %[self], 32($sp)\n\t"
356
357 // Load call params into the right registers.
358 "ld $a0, 0($sp)\n\t"
359 "ld $a1, 8($sp)\n\t"
360 "ld $a2, 16($sp)\n\t"
361 "ld $t9, 24($sp)\n\t"
362 "ld $s1, 32($sp)\n\t"
363 "daddiu $sp, $sp, 40\n\t"
364
365 "jalr $t9\n\t" // Call the stub.
366 "nop\n\t"
367 "daddiu $sp, $sp, 16\n\t" // Drop the quick "frame".
368
369 // Restore stuff not named clobbered.
370 "ld $a0, 0($sp)\n\t"
371 "ld $a1, 8($sp)\n\t"
372 "ld $a2, 16($sp)\n\t"
373 "ld $a3, 24($sp)\n\t"
374 "ld $a4, 32($sp)\n\t"
375 "ld $a5, 40($sp)\n\t"
376 "ld $a6, 48($sp)\n\t"
377 "ld $a7, 56($sp)\n\t"
378 "daddiu $sp, $sp, 64\n\t"
379
380 "move %[result], $v0\n\t" // Store the call result.
381 : [result] "=r" (result)
382 : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
383 [referrer] "r"(referrer)
384 : "at", "v0", "v1", "t0", "t1", "t2", "t3", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
385 "t8", "t9", "k0", "k1", "fp", "ra",
386 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7", "f8", "f9", "f10", "f11", "f12", "f13",
387 "f14", "f15", "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23", "f24", "f25", "f26",
388 "f27", "f28", "f29", "f30", "f31",
389 "memory"); // clobber.
Ian Rogers6f3dbba2014-10-14 17:41:57 -0700390#elif defined(__x86_64__) && !defined(__APPLE__) && defined(__clang__)
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700391 // Note: Uses the native convention
392 // TODO: Set the thread?
393 __asm__ __volatile__(
394 "pushq %[referrer]\n\t" // Push referrer
395 "pushq (%%rsp)\n\t" // & 16B alignment padding
396 ".cfi_adjust_cfa_offset 16\n\t"
397 "call *%%rax\n\t" // Call the stub
Mathieu Chartier2cebb242015-04-21 16:50:40 -0700398 "addq $16, %%rsp\n\t" // Pop null and padding
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700399 ".cfi_adjust_cfa_offset -16\n\t"
400 : "=a" (result)
401 // Use the result from rax
Andreas Gampe5c3d3a92015-01-21 12:23:50 -0800402 : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "c"(referrer)
Andreas Gampe2f6e3512014-06-07 01:32:33 -0700403 // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
Andreas Gampe5c3d3a92015-01-21 12:23:50 -0800404 : "rbx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
Andreas Gampe2f6e3512014-06-07 01:32:33 -0700405 "memory"); // clobber all
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700406 // TODO: Should we clobber the other registers?
407#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -0800408 UNUSED(arg0, arg1, arg2, code, referrer);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700409 LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
410 result = 0;
411#endif
412 // Pop transition.
413 self->PopManagedStackFragment(fragment);
Andreas Gampe6cf80102014-05-19 11:32:41 -0700414
415 fp_result = fpr_result;
416 EXPECT_EQ(0U, fp_result);
417
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700418 return result;
419 }
420
Andreas Gampe51f76352014-05-21 08:28:48 -0700421 // TODO: Set up a frame according to referrer's specs.
422 size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
423 Thread* self, mirror::ArtMethod* referrer, size_t hidden) {
424 // Push a transition back into managed code onto the linked list in thread.
425 ManagedStack fragment;
426 self->PushManagedStackFragment(&fragment);
427
428 size_t result;
429 size_t fpr_result = 0;
430#if defined(__i386__)
431 // TODO: Set the thread?
432 __asm__ __volatile__(
Mark P Mendell966c3ae2015-01-27 15:45:27 +0000433 "movd %[hidden], %%xmm7\n\t"
Ian Rogersc5f17732014-06-05 20:48:42 -0700434 "subl $12, %%esp\n\t" // Align stack.
Andreas Gampe51f76352014-05-21 08:28:48 -0700435 "pushl %[referrer]\n\t" // Store referrer
436 "call *%%edi\n\t" // Call the stub
Ian Rogersc5f17732014-06-05 20:48:42 -0700437 "addl $16, %%esp" // Pop referrer
Andreas Gampe51f76352014-05-21 08:28:48 -0700438 : "=a" (result)
439 // Use the result from eax
Andreas Gampe1a7e2922014-05-21 15:37:53 -0700440 : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer), [hidden]"m"(hidden)
Andreas Gampe2f6e3512014-06-07 01:32:33 -0700441 // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
442 : "memory"); // clobber.
Andreas Gampe51f76352014-05-21 08:28:48 -0700443 // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
444 // but compilation fails when declaring that.
445#elif defined(__arm__)
446 __asm__ __volatile__(
447 "push {r1-r12, lr}\n\t" // Save state, 13*4B = 52B
448 ".cfi_adjust_cfa_offset 52\n\t"
449 "push {r9}\n\t"
450 ".cfi_adjust_cfa_offset 4\n\t"
451 "mov r9, %[referrer]\n\n"
452 "str r9, [sp, #-8]!\n\t" // Push referrer, +8B padding so 16B aligned
453 ".cfi_adjust_cfa_offset 8\n\t"
454 "ldr r9, [sp, #8]\n\t"
455
456 // Push everything on the stack, so we don't rely on the order. What a mess. :-(
457 "sub sp, sp, #24\n\t"
458 "str %[arg0], [sp]\n\t"
459 "str %[arg1], [sp, #4]\n\t"
460 "str %[arg2], [sp, #8]\n\t"
461 "str %[code], [sp, #12]\n\t"
462 "str %[self], [sp, #16]\n\t"
463 "str %[hidden], [sp, #20]\n\t"
464 "ldr r0, [sp]\n\t"
465 "ldr r1, [sp, #4]\n\t"
466 "ldr r2, [sp, #8]\n\t"
467 "ldr r3, [sp, #12]\n\t"
468 "ldr r9, [sp, #16]\n\t"
469 "ldr r12, [sp, #20]\n\t"
470 "add sp, sp, #24\n\t"
471
472 "blx r3\n\t" // Call the stub
Mathieu Chartier2cebb242015-04-21 16:50:40 -0700473 "add sp, sp, #12\n\t" // Pop null and padding
Andreas Gampe51f76352014-05-21 08:28:48 -0700474 ".cfi_adjust_cfa_offset -12\n\t"
475 "pop {r1-r12, lr}\n\t" // Restore state
476 ".cfi_adjust_cfa_offset -52\n\t"
477 "mov %[result], r0\n\t" // Save the result
478 : [result] "=r" (result)
479 // Use the result from r0
Andreas Gampe2f6e3512014-06-07 01:32:33 -0700480 : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
481 [referrer] "r"(referrer), [hidden] "r"(hidden)
482 : "memory"); // clobber.
Andreas Gampe51f76352014-05-21 08:28:48 -0700483#elif defined(__aarch64__)
484 __asm__ __volatile__(
Andreas Gampef39b3782014-06-03 14:38:30 -0700485 // Spill x0-x7 which we say we don't clobber. May contain args.
Andreas Gampe51f76352014-05-21 08:28:48 -0700486 "sub sp, sp, #64\n\t"
487 ".cfi_adjust_cfa_offset 64\n\t"
Andreas Gampef39b3782014-06-03 14:38:30 -0700488 "stp x0, x1, [sp]\n\t"
489 "stp x2, x3, [sp, #16]\n\t"
490 "stp x4, x5, [sp, #32]\n\t"
491 "stp x6, x7, [sp, #48]\n\t"
Andreas Gampe51f76352014-05-21 08:28:48 -0700492
Andreas Gampef39b3782014-06-03 14:38:30 -0700493 "sub sp, sp, #16\n\t" // Reserve stack space, 16B aligned
494 ".cfi_adjust_cfa_offset 16\n\t"
495 "str %[referrer], [sp]\n\t" // referrer
Andreas Gampe51f76352014-05-21 08:28:48 -0700496
497 // Push everything on the stack, so we don't rely on the order. What a mess. :-(
498 "sub sp, sp, #48\n\t"
499 ".cfi_adjust_cfa_offset 48\n\t"
Andreas Gampef39b3782014-06-03 14:38:30 -0700500 // All things are "r" constraints, so direct str/stp should work.
501 "stp %[arg0], %[arg1], [sp]\n\t"
502 "stp %[arg2], %[code], [sp, #16]\n\t"
503 "stp %[self], %[hidden], [sp, #32]\n\t"
Andreas Gampe51f76352014-05-21 08:28:48 -0700504
505 // Now we definitely have x0-x3 free, use it to garble d8 - d15
506 "movk x0, #0xfad0\n\t"
507 "movk x0, #0xebad, lsl #16\n\t"
508 "movk x0, #0xfad0, lsl #32\n\t"
509 "movk x0, #0xebad, lsl #48\n\t"
510 "fmov d8, x0\n\t"
511 "add x0, x0, 1\n\t"
512 "fmov d9, x0\n\t"
513 "add x0, x0, 1\n\t"
514 "fmov d10, x0\n\t"
515 "add x0, x0, 1\n\t"
516 "fmov d11, x0\n\t"
517 "add x0, x0, 1\n\t"
518 "fmov d12, x0\n\t"
519 "add x0, x0, 1\n\t"
520 "fmov d13, x0\n\t"
521 "add x0, x0, 1\n\t"
522 "fmov d14, x0\n\t"
523 "add x0, x0, 1\n\t"
524 "fmov d15, x0\n\t"
525
Andreas Gampef39b3782014-06-03 14:38:30 -0700526 // Load call params into the right registers.
527 "ldp x0, x1, [sp]\n\t"
528 "ldp x2, x3, [sp, #16]\n\t"
Serban Constantinescu9bd88b02015-04-22 16:24:46 +0100529 "ldp x19, x17, [sp, #32]\n\t"
Andreas Gampe51f76352014-05-21 08:28:48 -0700530 "add sp, sp, #48\n\t"
531 ".cfi_adjust_cfa_offset -48\n\t"
532
Andreas Gampe51f76352014-05-21 08:28:48 -0700533 "blr x3\n\t" // Call the stub
Andreas Gampef39b3782014-06-03 14:38:30 -0700534 "mov x8, x0\n\t" // Store result
535 "add sp, sp, #16\n\t" // Drop the quick "frame"
536 ".cfi_adjust_cfa_offset -16\n\t"
Andreas Gampe51f76352014-05-21 08:28:48 -0700537
538 // Test d8 - d15. We can use x1 and x2.
539 "movk x1, #0xfad0\n\t"
540 "movk x1, #0xebad, lsl #16\n\t"
541 "movk x1, #0xfad0, lsl #32\n\t"
542 "movk x1, #0xebad, lsl #48\n\t"
543 "fmov x2, d8\n\t"
544 "cmp x1, x2\n\t"
545 "b.ne 1f\n\t"
546 "add x1, x1, 1\n\t"
547
548 "fmov x2, d9\n\t"
549 "cmp x1, x2\n\t"
550 "b.ne 1f\n\t"
551 "add x1, x1, 1\n\t"
552
553 "fmov x2, d10\n\t"
554 "cmp x1, x2\n\t"
555 "b.ne 1f\n\t"
556 "add x1, x1, 1\n\t"
557
558 "fmov x2, d11\n\t"
559 "cmp x1, x2\n\t"
560 "b.ne 1f\n\t"
561 "add x1, x1, 1\n\t"
562
563 "fmov x2, d12\n\t"
564 "cmp x1, x2\n\t"
565 "b.ne 1f\n\t"
566 "add x1, x1, 1\n\t"
567
568 "fmov x2, d13\n\t"
569 "cmp x1, x2\n\t"
570 "b.ne 1f\n\t"
571 "add x1, x1, 1\n\t"
572
573 "fmov x2, d14\n\t"
574 "cmp x1, x2\n\t"
575 "b.ne 1f\n\t"
576 "add x1, x1, 1\n\t"
577
578 "fmov x2, d15\n\t"
579 "cmp x1, x2\n\t"
580 "b.ne 1f\n\t"
581
Andreas Gampef39b3782014-06-03 14:38:30 -0700582 "mov x9, #0\n\t" // Use x9 as flag, in clobber list
Andreas Gampe51f76352014-05-21 08:28:48 -0700583
584 // Finish up.
585 "2:\n\t"
Andreas Gampef39b3782014-06-03 14:38:30 -0700586 "ldp x0, x1, [sp]\n\t" // Restore stuff not named clobbered, may contain fpr_result
587 "ldp x2, x3, [sp, #16]\n\t"
588 "ldp x4, x5, [sp, #32]\n\t"
589 "ldp x6, x7, [sp, #48]\n\t"
590 "add sp, sp, #64\n\t" // Free stack space, now sp as on entry
Andreas Gampe51f76352014-05-21 08:28:48 -0700591 ".cfi_adjust_cfa_offset -64\n\t"
592
Andreas Gampef39b3782014-06-03 14:38:30 -0700593 "str x9, %[fpr_result]\n\t" // Store the FPR comparison result
594 "mov %[result], x8\n\t" // Store the call result
595
Andreas Gampe51f76352014-05-21 08:28:48 -0700596 "b 3f\n\t" // Goto end
597
598 // Failed fpr verification.
599 "1:\n\t"
Andreas Gampef39b3782014-06-03 14:38:30 -0700600 "mov x9, #1\n\t"
Andreas Gampe51f76352014-05-21 08:28:48 -0700601 "b 2b\n\t" // Goto finish-up
602
603 // End
604 "3:\n\t"
Andreas Gampef39b3782014-06-03 14:38:30 -0700605 : [result] "=r" (result)
606 // Use the result from r0
Andreas Gampe51f76352014-05-21 08:28:48 -0700607 : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
Andreas Gampef39b3782014-06-03 14:38:30 -0700608 [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
609 : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
610 "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
611 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
612 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
613 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
Andreas Gampe2f6e3512014-06-07 01:32:33 -0700614 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
615 "memory"); // clobber.
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200616#elif defined(__mips__) && !defined(__LP64__)
617 __asm__ __volatile__ (
618 // Spill a0-a3 and t0-t7 which we say we don't clobber. May contain args.
619 "addiu $sp, $sp, -64\n\t"
620 "sw $a0, 0($sp)\n\t"
621 "sw $a1, 4($sp)\n\t"
622 "sw $a2, 8($sp)\n\t"
623 "sw $a3, 12($sp)\n\t"
624 "sw $t0, 16($sp)\n\t"
625 "sw $t1, 20($sp)\n\t"
626 "sw $t2, 24($sp)\n\t"
627 "sw $t3, 28($sp)\n\t"
628 "sw $t4, 32($sp)\n\t"
629 "sw $t5, 36($sp)\n\t"
630 "sw $t6, 40($sp)\n\t"
631 "sw $t7, 44($sp)\n\t"
632 // Spill gp register since it is caller save.
633 "sw $gp, 52($sp)\n\t"
634
635 "addiu $sp, $sp, -16\n\t" // Reserve stack space, 16B aligned.
636 "sw %[referrer], 0($sp)\n\t"
637
638 // Push everything on the stack, so we don't rely on the order.
639 "addiu $sp, $sp, -24\n\t"
640 "sw %[arg0], 0($sp)\n\t"
641 "sw %[arg1], 4($sp)\n\t"
642 "sw %[arg2], 8($sp)\n\t"
643 "sw %[code], 12($sp)\n\t"
644 "sw %[self], 16($sp)\n\t"
645 "sw %[hidden], 20($sp)\n\t"
646
647 // Load call params into the right registers.
648 "lw $a0, 0($sp)\n\t"
649 "lw $a1, 4($sp)\n\t"
650 "lw $a2, 8($sp)\n\t"
651 "lw $t9, 12($sp)\n\t"
652 "lw $s1, 16($sp)\n\t"
653 "lw $t0, 20($sp)\n\t"
654 "addiu $sp, $sp, 24\n\t"
655
656 "jalr $t9\n\t" // Call the stub.
657 "nop\n\t"
658 "addiu $sp, $sp, 16\n\t" // Drop the quick "frame".
659
660 // Restore stuff not named clobbered.
661 "lw $a0, 0($sp)\n\t"
662 "lw $a1, 4($sp)\n\t"
663 "lw $a2, 8($sp)\n\t"
664 "lw $a3, 12($sp)\n\t"
665 "lw $t0, 16($sp)\n\t"
666 "lw $t1, 20($sp)\n\t"
667 "lw $t2, 24($sp)\n\t"
668 "lw $t3, 28($sp)\n\t"
669 "lw $t4, 32($sp)\n\t"
670 "lw $t5, 36($sp)\n\t"
671 "lw $t6, 40($sp)\n\t"
672 "lw $t7, 44($sp)\n\t"
673 // Restore gp.
674 "lw $gp, 52($sp)\n\t"
675 "addiu $sp, $sp, 64\n\t" // Free stack space, now sp as on entry.
676
677 "move %[result], $v0\n\t" // Store the call result.
678 : [result] "=r" (result)
679 : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
680 [referrer] "r"(referrer), [hidden] "r"(hidden)
681 : "at", "v0", "v1", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "t8", "t9", "k0", "k1",
682 "fp", "ra",
683 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7", "f8", "f9", "f10", "f11", "f12", "f13",
684 "f14", "f15", "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23", "f24", "f25", "f26",
685 "f27", "f28", "f29", "f30", "f31",
686 "memory"); // clobber.
687#elif defined(__mips__) && defined(__LP64__)
688 __asm__ __volatile__ (
689 // Spill a0-a7 which we say we don't clobber. May contain args.
690 "daddiu $sp, $sp, -64\n\t"
691 "sd $a0, 0($sp)\n\t"
692 "sd $a1, 8($sp)\n\t"
693 "sd $a2, 16($sp)\n\t"
694 "sd $a3, 24($sp)\n\t"
695 "sd $a4, 32($sp)\n\t"
696 "sd $a5, 40($sp)\n\t"
697 "sd $a6, 48($sp)\n\t"
698 "sd $a7, 56($sp)\n\t"
699
700 "daddiu $sp, $sp, -16\n\t" // Reserve stack space, 16B aligned.
701 "sd %[referrer], 0($sp)\n\t"
702
703 // Push everything on the stack, so we don't rely on the order.
704 "daddiu $sp, $sp, -48\n\t"
705 "sd %[arg0], 0($sp)\n\t"
706 "sd %[arg1], 8($sp)\n\t"
707 "sd %[arg2], 16($sp)\n\t"
708 "sd %[code], 24($sp)\n\t"
709 "sd %[self], 32($sp)\n\t"
710 "sd %[hidden], 40($sp)\n\t"
711
712 // Load call params into the right registers.
713 "ld $a0, 0($sp)\n\t"
714 "ld $a1, 8($sp)\n\t"
715 "ld $a2, 16($sp)\n\t"
716 "ld $t9, 24($sp)\n\t"
717 "ld $s1, 32($sp)\n\t"
718 "ld $t0, 40($sp)\n\t"
719 "daddiu $sp, $sp, 48\n\t"
720
721 "jalr $t9\n\t" // Call the stub.
722 "nop\n\t"
723 "daddiu $sp, $sp, 16\n\t" // Drop the quick "frame".
724
725 // Restore stuff not named clobbered.
726 "ld $a0, 0($sp)\n\t"
727 "ld $a1, 8($sp)\n\t"
728 "ld $a2, 16($sp)\n\t"
729 "ld $a3, 24($sp)\n\t"
730 "ld $a4, 32($sp)\n\t"
731 "ld $a5, 40($sp)\n\t"
732 "ld $a6, 48($sp)\n\t"
733 "ld $a7, 56($sp)\n\t"
734 "daddiu $sp, $sp, 64\n\t"
735
736 "move %[result], $v0\n\t" // Store the call result.
737 : [result] "=r" (result)
738 : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
739 [referrer] "r"(referrer), [hidden] "r"(hidden)
740 : "at", "v0", "v1", "t0", "t1", "t2", "t3", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
741 "t8", "t9", "k0", "k1", "fp", "ra",
742 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7", "f8", "f9", "f10", "f11", "f12", "f13",
743 "f14", "f15", "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23", "f24", "f25", "f26",
744 "f27", "f28", "f29", "f30", "f31",
745 "memory"); // clobber.
Ian Rogers6f3dbba2014-10-14 17:41:57 -0700746#elif defined(__x86_64__) && !defined(__APPLE__) && defined(__clang__)
Andreas Gampe51f76352014-05-21 08:28:48 -0700747 // Note: Uses the native convention
748 // TODO: Set the thread?
749 __asm__ __volatile__(
Andreas Gampe51f76352014-05-21 08:28:48 -0700750 "pushq %[referrer]\n\t" // Push referrer
751 "pushq (%%rsp)\n\t" // & 16B alignment padding
752 ".cfi_adjust_cfa_offset 16\n\t"
Andreas Gampe1a7e2922014-05-21 15:37:53 -0700753 "call *%%rbx\n\t" // Call the stub
Mathieu Chartier2cebb242015-04-21 16:50:40 -0700754 "addq $16, %%rsp\n\t" // Pop null and padding
Andreas Gampe51f76352014-05-21 08:28:48 -0700755 ".cfi_adjust_cfa_offset -16\n\t"
756 : "=a" (result)
757 // Use the result from rax
Andreas Gampe1a7e2922014-05-21 15:37:53 -0700758 : "D"(arg0), "S"(arg1), "d"(arg2), "b"(code), [referrer] "c"(referrer), [hidden] "a"(hidden)
Andreas Gampe51f76352014-05-21 08:28:48 -0700759 // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
Andreas Gampe1a7e2922014-05-21 15:37:53 -0700760 : "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
Andreas Gampe2f6e3512014-06-07 01:32:33 -0700761 "memory"); // clobber all
Andreas Gampe51f76352014-05-21 08:28:48 -0700762 // TODO: Should we clobber the other registers?
763#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -0800764 UNUSED(arg0, arg1, arg2, code, referrer, hidden);
Andreas Gampe51f76352014-05-21 08:28:48 -0700765 LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
766 result = 0;
767#endif
768 // Pop transition.
769 self->PopManagedStackFragment(fragment);
770
771 fp_result = fpr_result;
772 EXPECT_EQ(0U, fp_result);
773
774 return result;
775 }
776
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700777 // Method with 32b arg0, 64b arg1
778 size_t Invoke3UWithReferrer(size_t arg0, uint64_t arg1, uintptr_t code, Thread* self,
779 mirror::ArtMethod* referrer) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200780#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) || \
781 defined(__aarch64__)
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700782 // Just pass through.
783 return Invoke3WithReferrer(arg0, arg1, 0U, code, self, referrer);
784#else
785 // Need to split up arguments.
786 uint32_t lower = static_cast<uint32_t>(arg1 & 0xFFFFFFFF);
787 uint32_t upper = static_cast<uint32_t>((arg1 >> 32) & 0xFFFFFFFF);
788
789 return Invoke3WithReferrer(arg0, lower, upper, code, self, referrer);
790#endif
791 }
792
Andreas Gampe29b38412014-08-13 00:15:43 -0700793 static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
794 int32_t offset;
795#ifdef __LP64__
796 offset = GetThreadOffset<8>(entrypoint).Int32Value();
797#else
798 offset = GetThreadOffset<4>(entrypoint).Int32Value();
799#endif
800 return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
801 }
802
 protected:
  // Copied from fpr_result by the architecture-specific invoke helpers above and
  // expected to be zero after every invocation (no stray floating-point result).
  size_t fp_result;
};
806
807
Andreas Gampe525cde22014-04-22 15:44:50 -0700808TEST_F(StubTest, Memcpy) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200809#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__)) || defined(__mips__)
Andreas Gampe525cde22014-04-22 15:44:50 -0700810 Thread* self = Thread::Current();
811
812 uint32_t orig[20];
813 uint32_t trg[20];
814 for (size_t i = 0; i < 20; ++i) {
815 orig[i] = i;
816 trg[i] = 0;
817 }
818
819 Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
Andreas Gampe29b38412014-08-13 00:15:43 -0700820 10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700821
822 EXPECT_EQ(orig[0], trg[0]);
823
824 for (size_t i = 1; i < 4; ++i) {
825 EXPECT_NE(orig[i], trg[i]);
826 }
827
828 for (size_t i = 4; i < 14; ++i) {
829 EXPECT_EQ(orig[i], trg[i]);
830 }
831
832 for (size_t i = 14; i < 20; ++i) {
833 EXPECT_NE(orig[i], trg[i]);
834 }
835
836 // TODO: Test overlapping?
837
838#else
839 LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
840 // Force-print to std::cout so it's also outside the logcat.
841 std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
842#endif
843}
844
// Exercises the art_quick_lock_object stub: initial thin-lock acquisition,
// recursive locking (thin lock count increments), and inflation to a fat lock
// when the lock word already holds an identity hashcode.
TEST_F(StubTest, LockObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();

  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  // A freshly allocated object must start out unlocked.
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  // First lock: unlocked -> thin-locked.
  Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
  EXPECT_EQ(lock_after.ThinLockCount(), 0U);  // Thin lock starts count at zero

  // Re-lock recursively; each acquisition bumps the thin-lock count by one.
  for (size_t i = 1; i < kThinLockLoops; ++i) {
    Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

    // Check we're at lock count i

    LockWord l_inc = obj->GetLockWord(false);
    LockWord::LockState l_inc_state = l_inc.GetState();
    EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
    EXPECT_EQ(l_inc.ThinLockCount(), i);
  }

  // Force a fat lock by running identity hashcode to fill up lock word.
  Handle<mirror::String> obj2(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  obj2->IdentityHashCode();

  // Locking an object whose lock word holds a hashcode must inflate to a fat lock.
  Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after2 = obj2->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
  EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));

  // Test done.
#else
  LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
903
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700904
// Deterministic pseudo-random generator for the stress tests below.
// Linear-congruential flavor: multiply by 48271 (wrapping modulo 2^32 by
// unsigned arithmetic), reduce modulo 2^31-1 and add a small offset.
// Statistical quality is irrelevant here; only determinism matters.
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  uint32_t next() {
    const uint32_t product = val_ * 48271u;  // Intentionally wraps mod 2^32.
    val_ = product % 2147483647u + 13u;
    return val_;
  }

  uint32_t val_;
};
916
917
// Exercises art_quick_unlock_object: unlocking an unlocked object raises an
// exception, lock/unlock pairs restore the unlocked state, and a randomized
// stress loop drives several objects through thin/fat lock transitions while
// tracking the expected lock counts.
// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();

  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
  const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init
  static constexpr size_t kNumberOfLocks = 10;  // Number of objects = lock
  StackHandleScope<kNumberOfLocks + 1> hs(self);
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  // Unlocking an object that was never locked must fail with a pending exception.
  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
  // This should be an illegal monitor state.
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // The failed unlock must leave the lock word untouched.
  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);

  // Lock once: unlocked -> thin-locked.
  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after2 = obj->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);

  // Matching unlock: thin-locked -> unlocked.
  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);

  LockWord lock_after3 = obj->GetLockWord(false);
  LockWord::LockState new_state3 = lock_after3.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);

  // Stress test:
  // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
  // each step.

  RandGen r(0x1234);

  constexpr size_t kIterations = 10000;  // Number of iterations
  constexpr size_t kMoveToFat = 1000;  // Chance of 1:kMoveFat to make a lock fat.

  size_t counts[kNumberOfLocks];   // Expected recursive lock count per object.
  bool fat[kNumberOfLocks];  // Whether a lock should be thin or fat.
  Handle<mirror::String> objects[kNumberOfLocks];

  // Initialize = allocate.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    counts[i] = 0;
    fat[i] = false;
    objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
  }

  for (size_t i = 0; i < kIterations; ++i) {
    // Select which lock to update.
    size_t index = r.next() % kNumberOfLocks;

    // Make lock fat?
    if (!fat[index] && (r.next() % kMoveToFat == 0)) {
      // Installing an identity hashcode forces later locking to inflate.
      fat[index] = true;
      objects[index]->IdentityHashCode();

      LockWord lock_iter = objects[index]->GetLockWord(false);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (counts[index] == 0) {
        EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
      } else {
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
      }
    } else {
      bool take_lock;  // Whether to lock or unlock in this step.
      if (counts[index] == 0) {
        take_lock = true;     // Cannot unlock an unlocked object.
      } else if (counts[index] == kThinLockLoops) {
        take_lock = false;    // Cap the recursion depth.
      } else {
        // Randomly.
        take_lock = r.next() % 2 == 0;
      }

      if (take_lock) {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
                      self);
        counts[index]++;
      } else {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      art_quick_unlock_object, self);
        counts[index]--;
      }

      EXPECT_FALSE(self->IsExceptionPending());

      // Check the new state.
      LockWord lock_iter = objects[index]->GetLockWord(true);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (fat[index]) {
        // Abuse MonitorInfo.
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
        MonitorInfo info(objects[index].Get());
        EXPECT_EQ(counts[index], info.entry_count_) << index;
      } else {
        if (counts[index] > 0) {
          EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
          // ThinLockCount() reports recursions, i.e. acquisitions minus one.
          EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
        } else {
          EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
        }
      }
    }
  }

  // Unlock the remaining count times and then check it's unlocked. Then deallocate.
  // Go reverse order to correctly handle Handles.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    size_t index = kNumberOfLocks - 1 - i;
    size_t count = counts[index];
    while (count > 0) {
      test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
                    self);
      count--;
    }

    LockWord lock_after4 = objects[index]->GetLockWord(false);
    LockWord::LockState new_state4 = lock_after4.GetState();
    // Fat locks stay fat after release, so both states are acceptable here.
    EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
                || LockWord::LockState::kFatLocked == new_state4);
  }

  // Test done.
#else
  UNUSED(test);
  LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1063
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001064TEST_F(StubTest, UnlockObject) {
Andreas Gampe369810a2015-01-14 19:53:31 -08001065 // This will lead to monitor error messages in the log.
1066 ScopedLogSeverity sls(LogSeverity::FATAL);
1067
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001068 TestUnlockObject(this);
1069}
Andreas Gampe525cde22014-04-22 15:44:50 -07001070
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
// NOTE(review): the CheckCast test below resolves the stub via
// StubTest::GetEntrypoint(self, kQuickCheckCast), so this forward declaration
// appears unused here -- confirm no other use before removing.
extern "C" void art_quick_check_cast(void);
#endif
1075
1076TEST_F(StubTest, CheckCast) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001077#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1078 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -07001079 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -07001080
1081 const uintptr_t art_quick_check_cast = StubTest::GetEntrypoint(self, kQuickCheckCast);
1082
Andreas Gampe525cde22014-04-22 15:44:50 -07001083 // Find some classes.
1084 ScopedObjectAccess soa(self);
1085 // garbage is created during ClassLinker::Init
1086
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001087 StackHandleScope<2> hs(soa.Self());
1088 Handle<mirror::Class> c(
1089 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
1090 Handle<mirror::Class> c2(
1091 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));
Andreas Gampe525cde22014-04-22 15:44:50 -07001092
1093 EXPECT_FALSE(self->IsExceptionPending());
1094
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001095 Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001096 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -07001097
1098 EXPECT_FALSE(self->IsExceptionPending());
1099
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001100 Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001101 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -07001102
1103 EXPECT_FALSE(self->IsExceptionPending());
1104
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001105 Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001106 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -07001107
1108 EXPECT_FALSE(self->IsExceptionPending());
1109
1110 // TODO: Make the following work. But that would require correct managed frames.
1111
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001112 Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001113 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -07001114
1115 EXPECT_TRUE(self->IsExceptionPending());
1116 self->ClearException();
1117
1118#else
1119 LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
1120 // Force-print to std::cout so it's also outside the logcat.
1121 std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
1122#endif
1123}
1124
1125
// Exercises the aput-object stub with null and bounds checking.
// Stub arguments via Invoke3 are (array, index, value). Success cases store a
// String (and null) into a String[]; failure cases use bad indices or an
// incompatible element type and expect a pending exception.
TEST_F(StubTest, APutObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  // Do not check non-checked ones, we'd need handlers and stuff...
  const uintptr_t art_quick_aput_obj_with_null_and_bound_check =
      StubTest::GetEntrypoint(self, kQuickAputObjectWithNullAndBoundCheck);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<5> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
  Handle<mirror::Class> ca(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  // Build a string array of length 10.
  Handle<mirror::ObjectArray<mirror::Object>> array(
      hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));

  // Build a string -> should be assignable
  Handle<mirror::String> str_obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  // Build a generic object -> should fail assigning
  Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));

  // Play with it...

  // 1) Success cases
  // 1.1) Assign str_obj to array[0..3]

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(3));

  // 1.2) Assign null to array[0..3]

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(3));

  // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.

  // 2) Failure cases (str into str[])
  // 2.1) Array = null
  // TODO: Throwing NPE needs actual DEX code

//  Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
//          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
//
//  EXPECT_TRUE(self->IsExceptionPending());
//  self->ClearException();

  // 2.2) Index < 0

  Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
          reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 2.3) Index >= length (the array has length 10)

  Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 3) Failure cases (obj into str[])

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // Tests done.
#else
  LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1259
// Exercises the three quick object-allocation entrypoints (AllocObject,
// AllocObjectResolved, AllocObjectInitialized) on a plain java.lang.Object,
// then fills the heap and verifies the initialized path reports OOM.
TEST_F(StubTest, AllocObject) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            reinterpret_cast<size_t>(c->GetVirtualMethod(0)),  // arbitrary
                            0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObject),
                            self);

    // A successful allocation returns a non-null object of the requested class.
    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K
    size_t length = 128 * KB / 4;
    while (length > 10) {
      // Allocate ever-smaller arrays until even tiny ones fail, keeping every
      // success alive in a handle so the memory cannot be reclaimed.
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h.Get() == nullptr) {
        self->ClearException();

        // Try a smaller length
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h.Get() != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    // With the heap exhausted, the stub must fail: null result plus pending OOME.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1385
// Exercises the quick array-allocation stubs: a successful resolved allocation of a 10-element
// Object[] and an out-of-memory failure path. The "unresolved" stub path is still TODO.
TEST_F(StubTest, AllocObjectArray) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(self);
  // The array class being allocated.
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

  // Needed to have a linked method.
  Handle<mirror::Class> c_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());

  // For some reason this does not work, as the type_idx is artificial and outside what the
  // resolved types of c_obj allow...

  if ((false)) {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            10U,
                            reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0)),  // arbitrary
                            StubTest::GetEntrypoint(self, kQuickAllocArray),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    EXPECT_EQ(obj->GetLength(), 10);
  }

  // Success path: the "resolved" stub takes the class pointer directly.
  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 10U,
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory.
  {
    // A GB-element array cannot be satisfied; the stub must return null and leave an
    // exception (presumably OutOfMemoryError) pending.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()),
                            GB,  // that should fail...
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1472
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001473
// Exercises the art_quick_string_compareto stub against String::CompareTo for all ordered
// pairs of a fixed set of strings. Only the sign of the result is checked, since the stub's
// magnitude is not specified — the expectation matrix is computed from the managed CompareTo.
TEST_F(StubTest, StringCompareTo) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();

  const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);

  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  const char* c[] = { "", "", "a", "aa", "ab",
      "aacaacaacaacaacaac",  // This one's under the default limit to go to __memcmp16.
      "aacaacaacaacaacaacaacaacaacaacaacaac",     // This one's over.
      "aacaacaacaacaacaacaacaacaacaacaacaaca" };  // As is this one. We need a separate one to
                                                  // defeat object-equal optimizations.
  static constexpr size_t kStringCount = arraysize(c);

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  // Materialize each C string as a managed java.lang.String.
  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
  }

  // TODO: wide characters

  // Matrix of expectations. First component is first parameter. Note we only check against the
  // sign, not the value. As we are testing random offsets, we need to compute this and need to
  // rely on String::CompareTo being correct.
  int32_t expected[kStringCount][kStringCount];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      expected[x][y] = s[x]->CompareTo(s[y].Get());
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      // Test string_compareto x y
      size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
                              reinterpret_cast<size_t>(s[y].Get()), 0U,
                              art_quick_string_compareto, self);

      EXPECT_FALSE(self->IsExceptionPending());

      // The result is a 32b signed integer
      // Reinterpret the low bits of the size_t return as int32_t via a union
      // (deliberate type-pun, matching this file's existing convention).
      union {
        size_t r;
        int32_t i;
      } conv;
      conv.r = result;
      int32_t e = expected[x][y];
      // Each assertion only fires when the expected sign class applies (== 0, < 0, > 0).
      EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
      std::endl;
#endif
}
1554
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001555
Mathieu Chartierc7853442015-03-27 14:35:38 -07001556static void GetSetBooleanStatic(ArtField* f, Thread* self,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001557 mirror::ArtMethod* referrer, StubTest* test)
Fred Shih37f05ef2014-07-16 18:38:08 -07001558 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001559#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1560 (defined(__x86_64__) && !defined(__APPLE__))
Fred Shih37f05ef2014-07-16 18:38:08 -07001561 constexpr size_t num_values = 5;
1562 uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };
1563
1564 for (size_t i = 0; i < num_values; ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001565 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001566 static_cast<size_t>(values[i]),
1567 0U,
1568 StubTest::GetEntrypoint(self, kQuickSet8Static),
1569 self,
1570 referrer);
1571
Mathieu Chartierc7853442015-03-27 14:35:38 -07001572 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001573 0U, 0U,
1574 StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
1575 self,
1576 referrer);
1577 // Boolean currently stores bools as uint8_t, be more zealous about asserting correct writes/gets.
1578 EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
1579 }
1580#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001581 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001582 LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
1583 // Force-print to std::cout so it's also outside the logcat.
1584 std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1585#endif
1586}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001587static void GetSetByteStatic(ArtField* f, Thread* self, mirror::ArtMethod* referrer,
1588 StubTest* test)
Fred Shih37f05ef2014-07-16 18:38:08 -07001589 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001590#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1591 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001592 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001593
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001594 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001595 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001596 static_cast<size_t>(values[i]),
1597 0U,
1598 StubTest::GetEntrypoint(self, kQuickSet8Static),
1599 self,
1600 referrer);
1601
Mathieu Chartierc7853442015-03-27 14:35:38 -07001602 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001603 0U, 0U,
1604 StubTest::GetEntrypoint(self, kQuickGetByteStatic),
1605 self,
1606 referrer);
1607 EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
1608 }
1609#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001610 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001611 LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
1612 // Force-print to std::cout so it's also outside the logcat.
1613 std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1614#endif
1615}
1616
1617
// Round-trips byte patterns through the instance-field set8 stub, cross-checks the stored
// value via the reflective ArtField accessor, then reads it back through the get-boolean stub.
static void GetSetBooleanInstance(Handle<mirror::Object>* obj, ArtField* f, Thread* self,
                                  mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Non-canonical "boolean" byte patterns are intentional (see GetSetBooleanStatic).
  uint8_t values[] = { 0, true, 2, 128, 0xFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write through the 8-bit instance set stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet8Instance),
                              self,
                              referrer);

    // Verify the stub's write via the reflective getter.
    uint8_t res = f->GetBoolean(obj->Get());
    EXPECT_EQ(values[i], res) << "Iteration " << i;

    // Store the same value reflectively, then confirm the get stub observes it.
    f->SetBoolean<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint8_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
// Round-trips signed bytes through the instance-field set8 stub, verifies via the reflective
// getter, perturbs the value reflectively (++res), and confirms the get-byte stub sees it.
static void GetSetByteInstance(Handle<mirror::Object>* obj, ArtField* f,
                               Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  int8_t values[] = { -128, -64, 0, 64, 127 };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write through the 8-bit instance set stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet8Instance),
                              self,
                              referrer);

    // Verify the stub's write via the reflective getter.
    int8_t res = f->GetByte(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    // Store a different value reflectively so the stub read below is meaningful.
    f->SetByte<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetByteInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int8_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1687
Mathieu Chartierc7853442015-03-27 14:35:38 -07001688static void GetSetCharStatic(ArtField* f, Thread* self, mirror::ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001689 StubTest* test)
Fred Shih37f05ef2014-07-16 18:38:08 -07001690 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001691#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1692 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001693 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001694
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001695 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001696 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001697 static_cast<size_t>(values[i]),
1698 0U,
1699 StubTest::GetEntrypoint(self, kQuickSet16Static),
1700 self,
1701 referrer);
1702
Mathieu Chartierc7853442015-03-27 14:35:38 -07001703 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001704 0U, 0U,
1705 StubTest::GetEntrypoint(self, kQuickGetCharStatic),
1706 self,
1707 referrer);
1708
1709 EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
1710 }
1711#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001712 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001713 LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
1714 // Force-print to std::cout so it's also outside the logcat.
1715 std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1716#endif
1717}
// Round-trips signed 16-bit values through the static-field set16 / get-short quick stubs and
// checks the low 16 bits, reinterpreted as int16_t, come back unchanged.
static void GetSetShortStatic(ArtField* f, Thread* self,
                              mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Both extremes plus interior points of the signed 16-bit range.
  int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the 16-bit static set stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet16Static),
                              self,
                              referrer);

    // Read back through the short get stub.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetShortStatic),
                                           self,
                                           referrer);

    EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1748
// Round-trips unsigned 16-bit values through the instance-field set16 stub, verifies via the
// reflective getter, perturbs the value reflectively (++res), and confirms the get-char stub
// observes the new value.
static void GetSetCharInstance(Handle<mirror::Object>* obj, ArtField* f,
                               Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write through the 16-bit instance set stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    // Verify the stub's write via the reflective getter.
    uint16_t res = f->GetChar(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    // Store a different value reflectively so the stub read below is meaningful.
    f->SetChar<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetCharInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint16_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
// Round-trips signed 16-bit values through the instance-field set16 stub, verifies via the
// reflective getter, perturbs the value reflectively (++res), and confirms the get-short stub
// observes the new value.
static void GetSetShortInstance(Handle<mirror::Object>* obj, ArtField* f,
                                Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write through the 16-bit instance set stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    // Verify the stub's write via the reflective getter.
    int16_t res = f->GetShort(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    // Store a different value reflectively so the stub read below is meaningful.
    f->SetShort<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetShortInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int16_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1817
Mathieu Chartierc7853442015-03-27 14:35:38 -07001818static void GetSet32Static(ArtField* f, Thread* self, mirror::ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001819 StubTest* test)
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001820 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001821#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1822 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001823 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001824
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001825 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001826 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001827 static_cast<size_t>(values[i]),
1828 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001829 StubTest::GetEntrypoint(self, kQuickSet32Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001830 self,
1831 referrer);
1832
Mathieu Chartierc7853442015-03-27 14:35:38 -07001833 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001834 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001835 StubTest::GetEntrypoint(self, kQuickGet32Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001836 self,
1837 referrer);
1838
Goran Jakovljevic04568812015-04-23 15:27:23 +02001839#if defined(__mips__) && defined(__LP64__)
1840 EXPECT_EQ(static_cast<uint32_t>(res), values[i]) << "Iteration " << i;
1841#else
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001842 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Goran Jakovljevic04568812015-04-23 15:27:23 +02001843#endif
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001844 }
1845#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001846 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001847 LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
1848 // Force-print to std::cout so it's also outside the logcat.
1849 std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
1850#endif
1851}
1852
1853
// Round-trips 32-bit values through the instance-field set32 stub, verifies via the
// reflective getter, perturbs the value reflectively (res + 1), and confirms the get32 stub
// observes the new value.
static void GetSet32Instance(Handle<mirror::Object>* obj, ArtField* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write through the 32-bit instance set stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet32Instance),
                              self,
                              referrer);

    // Verify the stub's write via the reflective getter.
    int32_t res = f->GetInt(obj->Get());
    EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;

    // Store a different value reflectively so the stub read below is meaningful.
    res++;
    f->SetInt<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGet32Instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int32_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1890
1891
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))

// Helper: writes |val| into the static object field |f_idx| via the set-obj quick stub, reads
// it back via the get-obj quick stub, and expects exactly the same reference to come back.
static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
                                 mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // Store the reference through the quick entrypoint.
  test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                            reinterpret_cast<size_t>(val),
                            0U,
                            StubTest::GetEntrypoint(self, kQuickSetObjStatic),
                            self,
                            referrer);

  // Read it back through the matching get entrypoint.
  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                                         0U, 0U,
                                         StubTest::GetEntrypoint(self, kQuickGetObjStatic),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
}
#endif
1914
Mathieu Chartierc7853442015-03-27 14:35:38 -07001915static void GetSetObjStatic(ArtField* f, Thread* self, mirror::ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001916 StubTest* test)
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001917 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001918#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1919 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierc7853442015-03-27 14:35:38 -07001920 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001921
1922 // Allocate a string object for simplicity.
1923 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartierc7853442015-03-27 14:35:38 -07001924 set_and_check_static(f->GetDexFieldIndex(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001925
Mathieu Chartierc7853442015-03-27 14:35:38 -07001926 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001927#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001928 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001929 LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
1930 // Force-print to std::cout so it's also outside the logcat.
1931 std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
1932#endif
1933}
1934
1935
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001936#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1937 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierc7853442015-03-27 14:35:38 -07001938static void set_and_check_instance(ArtField* f, mirror::Object* trg,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001939 mirror::Object* val, Thread* self, mirror::ArtMethod* referrer,
1940 StubTest* test)
1941 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001942 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001943 reinterpret_cast<size_t>(trg),
1944 reinterpret_cast<size_t>(val),
Andreas Gampe29b38412014-08-13 00:15:43 -07001945 StubTest::GetEntrypoint(self, kQuickSetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001946 self,
1947 referrer);
1948
Mathieu Chartierc7853442015-03-27 14:35:38 -07001949 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001950 reinterpret_cast<size_t>(trg),
1951 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001952 StubTest::GetEntrypoint(self, kQuickGetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001953 self,
1954 referrer);
1955
1956 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1957
Mathieu Chartierc7853442015-03-27 14:35:38 -07001958 EXPECT_EQ(val, f->GetObj(trg));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001959}
1960#endif
1961
Mathieu Chartierc7853442015-03-27 14:35:38 -07001962static void GetSetObjInstance(Handle<mirror::Object>* obj, ArtField* f,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001963 Thread* self, mirror::ArtMethod* referrer, StubTest* test)
1964 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001965#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1966 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001967 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001968
1969 // Allocate a string object for simplicity.
1970 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001971 set_and_check_instance(f, obj->Get(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001972
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001973 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001974#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001975 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001976 LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
1977 // Force-print to std::cout so it's also outside the logcat.
1978 std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
1979#endif
1980}
1981
1982
1983// TODO: Complete these tests for 32b architectures.
1984
Mathieu Chartierc7853442015-03-27 14:35:38 -07001985static void GetSet64Static(ArtField* f, Thread* self, mirror::ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001986 StubTest* test)
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001987 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001988#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) || \
1989 defined(__aarch64__)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001990 uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001991
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001992 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001993 test->Invoke3UWithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001994 values[i],
Andreas Gampe29b38412014-08-13 00:15:43 -07001995 StubTest::GetEntrypoint(self, kQuickSet64Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001996 self,
1997 referrer);
1998
Mathieu Chartierc7853442015-03-27 14:35:38 -07001999 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002000 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07002001 StubTest::GetEntrypoint(self, kQuickGet64Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002002 self,
2003 referrer);
2004
2005 EXPECT_EQ(res, values[i]) << "Iteration " << i;
2006 }
2007#else
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002008 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002009 LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
2010 // Force-print to std::cout so it's also outside the logcat.
2011 std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
2012#endif
2013}
2014
2015
// Round-trips 64-bit values through the quick Set64Instance/Get64Instance
// stubs for instance field `f` of `*obj`, cross-checking against direct
// (non-stub) field reads and writes. 64-bit-capable targets only.
static void GetSet64Instance(Handle<mirror::Object>* obj, ArtField* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) || \
    defined(__aarch64__)
  uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write the value through the stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet64Instance),
                              self,
                              referrer);

    // Verify the stub's write with a direct field read.
    int64_t res = f->GetLong(obj->Get());
    EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;

    // Mutate the field directly, then check the Get64Instance stub observes
    // the new value.
    res++;
    f->SetLong<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGet64Instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int64_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
2052
// Loads the AllFields test class, allocates an instance, and for every static
// and instance field whose primitive type equals `test_type` runs the matching
// stub-based get/set round-trip helper defined above.
static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
  // garbage is created during ClassLinker::Init

  // Find the class and allocate an instance via JNI; the runtime must already
  // have been started by the caller.
  JNIEnv* env = Thread::Current()->GetJniEnv();
  jclass jc = env->FindClass("AllFields");
  CHECK(jc != nullptr);
  jobject o = env->AllocObject(jc);
  CHECK(o != nullptr);

  ScopedObjectAccess soa(self);
  StackHandleScope<4> hs(self);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
  Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
  // Need a method as a referrer
  Handle<mirror::ArtMethod> m(hs.NewHandle(c->GetDirectMethod(0)));

  // Play with it...

  // Static fields.
  ArtField* fields = c->GetSFields();
  size_t num_fields = c->NumStaticFields();
  for (size_t i = 0; i < num_fields; ++i) {
    ArtField* f = &fields[i];
    Primitive::Type type = f->GetTypeAsPrimitiveType();
    // Only exercise fields of the requested primitive type.
    if (test_type != type) {
      continue;
    }
    switch (type) {
      case Primitive::Type::kPrimBoolean:
        GetSetBooleanStatic(f, self, m.Get(), test);
        break;
      case Primitive::Type::kPrimByte:
        GetSetByteStatic(f, self, m.Get(), test);
        break;
      case Primitive::Type::kPrimChar:
        GetSetCharStatic(f, self, m.Get(), test);
        break;
      case Primitive::Type::kPrimShort:
        GetSetShortStatic(f, self, m.Get(), test);
        break;
      case Primitive::Type::kPrimInt:
        GetSet32Static(f, self, m.Get(), test);
        break;
      case Primitive::Type::kPrimLong:
        GetSet64Static(f, self, m.Get(), test);
        break;
      case Primitive::Type::kPrimNot:
        // Don't try array.
        if (f->GetTypeDescriptor()[0] != '[') {
          GetSetObjStatic(f, self, m.Get(), test);
        }
        break;
      default:
        break;  // Skip.
    }
  }

  // Instance fields.
  fields = c->GetIFields();
  num_fields = c->NumInstanceFields();
  for (size_t i = 0; i < num_fields; ++i) {
    ArtField* f = &fields[i];
    Primitive::Type type = f->GetTypeAsPrimitiveType();
    if (test_type != type) {
      continue;
    }
    switch (type) {
      case Primitive::Type::kPrimBoolean:
        GetSetBooleanInstance(&obj, f, self, m.Get(), test);
        break;
      case Primitive::Type::kPrimByte:
        GetSetByteInstance(&obj, f, self, m.Get(), test);
        break;
      case Primitive::Type::kPrimChar:
        GetSetCharInstance(&obj, f, self, m.Get(), test);
        break;
      case Primitive::Type::kPrimShort:
        GetSetShortInstance(&obj, f, self, m.Get(), test);
        break;
      case Primitive::Type::kPrimInt:
        GetSet32Instance(&obj, f, self, m.Get(), test);
        break;
      case Primitive::Type::kPrimLong:
        GetSet64Instance(&obj, f, self, m.Get(), test);
        break;
      case Primitive::Type::kPrimNot:
        // Don't try array.
        if (f->GetTypeDescriptor()[0] != '[') {
          GetSetObjInstance(&obj, f, self, m.Get(), test);
        }
        break;
      default:
        break;  // Skip.
    }
  }

  // TODO: Deallocate things.
}
2151
Fred Shih37f05ef2014-07-16 18:38:08 -07002152TEST_F(StubTest, Fields8) {
2153 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
2154
2155 Thread* self = Thread::Current();
2156
2157 self->TransitionFromSuspendedToRunnable();
2158 LoadDex("AllFields");
2159 bool started = runtime_->Start();
2160 CHECK(started);
2161
2162 TestFields(self, this, Primitive::Type::kPrimBoolean);
2163 TestFields(self, this, Primitive::Type::kPrimByte);
2164}
2165
2166TEST_F(StubTest, Fields16) {
2167 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
2168
2169 Thread* self = Thread::Current();
2170
2171 self->TransitionFromSuspendedToRunnable();
2172 LoadDex("AllFields");
2173 bool started = runtime_->Start();
2174 CHECK(started);
2175
2176 TestFields(self, this, Primitive::Type::kPrimChar);
2177 TestFields(self, this, Primitive::Type::kPrimShort);
2178}
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07002179
2180TEST_F(StubTest, Fields32) {
2181 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
2182
2183 Thread* self = Thread::Current();
2184
2185 self->TransitionFromSuspendedToRunnable();
2186 LoadDex("AllFields");
2187 bool started = runtime_->Start();
2188 CHECK(started);
2189
2190 TestFields(self, this, Primitive::Type::kPrimInt);
2191}
2192
2193TEST_F(StubTest, FieldsObj) {
2194 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
2195
2196 Thread* self = Thread::Current();
2197
2198 self->TransitionFromSuspendedToRunnable();
2199 LoadDex("AllFields");
2200 bool started = runtime_->Start();
2201 CHECK(started);
2202
2203 TestFields(self, this, Primitive::Type::kPrimNot);
2204}
2205
2206TEST_F(StubTest, Fields64) {
2207 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
2208
2209 Thread* self = Thread::Current();
2210
2211 self->TransitionFromSuspendedToRunnable();
2212 LoadDex("AllFields");
2213 bool started = runtime_->Start();
2214 CHECK(started);
2215
2216 TestFields(self, this, Primitive::Type::kPrimLong);
2217}
2218
// Exercises interface-method dispatch stubs: (1) the IMT conflict trampoline
// and (2) the regular interface-invoke trampoline with access check, using
// java.util.ArrayList.contains / java.util.List.contains as the test methods.
TEST_F(StubTest, IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  // 7 handles: contains_amethod, inf_contains, array_list, obj (+ headroom).
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod = env->GetMethodID(arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get mirror representation.
  Handle<mirror::ArtMethod> contains_amethod(hs.NewHandle(soa.DecodeMethod(contains_jmethod)));

  // Patch up ArrayList.contains: if it has no compiled code, route it through
  // the quick-to-interpreter bridge so the trampolines have a valid target.
  if (contains_amethod.Get()->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod.Get()->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
  }

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod = env->GetMethodID(list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get mirror representation.
  Handle<mirror::ArtMethod> inf_contains(hs.NewHandle(soa.DecodeMethod(inf_contains_jmethod)));

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Create instances.

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object*>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(jobj)));

  // Invocation tests.

  // 1. imt_conflict

  // Contains. Empty list, so the element must not be found.

  size_t result =
      Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self, contains_amethod.Get(),
                                   static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());

  // Contains. The element was just added, so now it must be found.

  result = Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                        reinterpret_cast<size_t>(obj.Get()),
                                        StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                        self, contains_amethod.Get(),
                                        static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // 2. regular interface trampoline

  // contains(obj) -> true (obj was added above).
  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(obj.Get()),
                               StubTest::GetEntrypoint(self,
                                                       kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod.Get());

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // contains(array_list) -> false (the list does not contain itself).
  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               StubTest::GetEntrypoint(self,
                                                       kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod.Get());

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
2340
// Exercises the quick IndexOf stub: for every (string, char, start) triple it
// compares the stub's result against String::FastIndexOf, including negative
// and past-the-end start indices.
TEST_F(StubTest, StringIndexOf) {
#if defined(__arm__) || defined(__aarch64__)
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  const char* c_str[] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
  static constexpr size_t kStringCount = arraysize(c_str);
  const char c_char[] = { 'a', 'b', 'c', 'd', 'e' };
  static constexpr size_t kCharCount = arraysize(c_char);

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
  }

  // Matrix of expectations, computed via String::FastIndexOf (the reference
  // implementation the stub must agree with).
  static constexpr size_t kMaxLen = 9;
  DCHECK_LE(strlen(c_str[kStringCount-1]), kMaxLen) << "Please fix the indexof test.";

  // Last dimension: start, offset by 1 (so index 0 means start == -1).
  int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
      }
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        int32_t start = static_cast<int32_t>(z) - 1;

        // Invoke the IndexOf stub for s[x], character c_char[y], from `start`.
        size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
                                StubTest::GetEntrypoint(self, kQuickIndexOf), self);

        EXPECT_FALSE(self->IsExceptionPending());

        // The result is a 32b signed integer; reinterpret the low bits of the
        // size_t return value without sign-extension surprises.
        union {
          size_t r;
          int32_t i;
        } conv;
        conv.r = result;

        EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
            c_char[y] << " @ " << start;
      }
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
2416
Andreas Gampe525cde22014-04-22 15:44:50 -07002417} // namespace art