/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "common_runtime_test.h"
#include "mirror/art_field-inl.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/string-inl.h"

#include <cstdio>

namespace art {

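// Tests that invoke the architecture-specific quick entrypoint stubs (memcpy,
// object locking, check-cast, aput-object, allocation, String.compareTo and
// static field access) directly, via hand-written inline assembly that mimics
// a call coming from managed code.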
class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods.
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(type), type);
        }
      }
    }
  }

  void SetUpRuntimeOptions(Runtime::Options* options) OVERRIDE {
    // Use a smaller heap.
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }
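
  // Invoke a stub as if called from managed code. The inline assembly below
  // pushes |referrer| to fake the managed caller's frame, loads the arguments
  // into the registers of the quick calling convention for the target ISA,
  // and calls |code|. On arm64 it additionally fills the callee-saved FP
  // registers d8-d15 with a known pattern and verifies afterwards that the
  // stub preserved them; the outcome is stored into fp_result.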
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             mirror::ArtMethod* referrer) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushl %[referrer]\n\t"     // Store referrer
        "call *%%edi\n\t"           // Call the stub
        "addl $4, %%esp"            // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : );  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\t"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #20\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "add sp, sp, #20\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop referrer and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : );  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill space for d8 - d15
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp d8, d9,   [sp]\n\t"
        "stp d10, d11, [sp, #16]\n\t"
        "stp d12, d13, [sp, #32]\n\t"
        "stp d14, d15, [sp, #48]\n\t"

        "sub sp, sp, #48\n\t"            // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 48\n\t"
        "stp %[referrer], x1, [sp]\n\t"  // referrer, x1
        "stp x2, x3,   [sp, #16]\n\t"    // Save x2, x3
        "stp x18, x30, [sp, #32]\n\t"    // Save x18(xSELF), xLR

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #8]\n\t"
        "str %[arg2], [sp, #16]\n\t"
        "str %[code], [sp, #24]\n\t"
        "str %[self], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params
        "ldr x0, [sp]\n\t"
        "ldr x1, [sp, #8]\n\t"
        "ldr x2, [sp, #16]\n\t"
        "ldr x3, [sp, #24]\n\t"
        "ldr x18, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"                   // Call the stub

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x2, #0\n\t"
        "str x2, %[fpr_result]\n\t"

        // Finish up.
        "2:\n\t"
        "ldp x1, x2, [sp, #8]\n\t"     // Restore x1, x2
        "ldp x3, x18, [sp, #24]\n\t"   // Restore x3, xSELF
        "ldr x30, [sp, #40]\n\t"       // Restore xLR
        "add sp, sp, #48\n\t"          // Free stack space
        ".cfi_adjust_cfa_offset -48\n\t"
        "mov %[result], x0\n\t"        // Save the result

        "ldp d8, d9,   [sp]\n\t"       // Restore d8 - d15
        "ldp d10, d11, [sp, #16]\n\t"
        "ldp d12, d13, [sp, #32]\n\t"
        "ldp d14, d15, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset -64\n\t"

        "b 3f\n\t"                     // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x2, #1\n\t"
        "str x2, %[fpr_result]\n\t"
        "b 2b\n\t"                     // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from x0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [fpr_result] "m" (fpr_result)
        : "x4", "x5", "x6", "x7", "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17");  // clobber.
#elif defined(__x86_64__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"              // Call the stub
        "addq $16, %%rsp\n\t"          // Pop referrer and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "m"(referrer)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbx", "rcx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

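  // As above, but additionally passes the "hidden" argument some trampolines
  // expect: the code below places it in xmm0 on x86/x86-64, r12 on arm, and
  // x12 on arm64.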
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, mirror::ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "movd %[hidden], %%xmm0\n\t"
        "pushl %[referrer]\n\t"     // Store referrer
        "call *%%edi\n\t"           // Call the stub
        "addl $4, %%esp"            // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"m"(referrer), [hidden]"r"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : );  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\t"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop referrer and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : );  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill space for d8 - d15
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp d8, d9,   [sp]\n\t"
        "stp d10, d11, [sp, #16]\n\t"
        "stp d12, d13, [sp, #32]\n\t"
        "stp d14, d15, [sp, #48]\n\t"

        "sub sp, sp, #48\n\t"            // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 48\n\t"
        "stp %[referrer], x1, [sp]\n\t"  // referrer, x1
        "stp x2, x3,   [sp, #16]\n\t"    // Save x2, x3
        "stp x18, x30, [sp, #32]\n\t"    // Save x18(xSELF), xLR

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #8]\n\t"
        "str %[arg2], [sp, #16]\n\t"
        "str %[code], [sp, #24]\n\t"
        "str %[self], [sp, #32]\n\t"
        "str %[hidden], [sp, #40]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params
        "ldr x0, [sp]\n\t"
        "ldr x1, [sp, #8]\n\t"
        "ldr x2, [sp, #16]\n\t"
        "ldr x3, [sp, #24]\n\t"
        "ldr x18, [sp, #32]\n\t"
        "ldr x12, [sp, #40]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"                   // Call the stub

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov %[fpr_result], #0\n\t"

        // Finish up.
        "2:\n\t"
        "ldp x1, x2, [sp, #8]\n\t"     // Restore x1, x2
        "ldp x3, x18, [sp, #24]\n\t"   // Restore x3, xSELF
        "ldr x30, [sp, #40]\n\t"       // Restore xLR
        "add sp, sp, #48\n\t"          // Free stack space
        ".cfi_adjust_cfa_offset -48\n\t"
        "mov %[result], x0\n\t"        // Save the result

        "ldp d8, d9,   [sp]\n\t"       // Restore d8 - d15
        "ldp d10, d11, [sp, #16]\n\t"
        "ldp d12, d13, [sp, #32]\n\t"
        "ldp d14, d15, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset -64\n\t"

        "b 3f\n\t"                     // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov %[fpr_result], #1\n\t"
        "b 2b\n\t"                     // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result), [fpr_result] "=r" (fpr_result)
          // Use the result from x0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "x4", "x5", "x6", "x7", "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17");  // clobber.
#elif defined(__x86_64__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "movq %[hidden], %%r9\n\t"     // No need to save r9, listed as clobbered
        "movd %%r9, %%xmm0\n\t"
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"              // Call the stub
        "addq $16, %%rsp\n\t"          // Pop referrer and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "m"(referrer), [hidden] "m"(hidden)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbx", "rcx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

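  // On 32-bit targets a 64-bit argument is passed as two 32-bit halves, so
  // the helpers below split wide arguments before invoking.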
  // Method with 32b arg0, 64b arg1
  size_t Invoke3UWithReferrer(size_t arg0, uint64_t arg1, uintptr_t code, Thread* self,
                              mirror::ArtMethod* referrer) {
#if defined(__x86_64__) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, 0U, code, self, referrer);
#else
    // Need to split up arguments.
    uint32_t lower = static_cast<uint32_t>(arg1 & 0xFFFFFFFF);
    uint32_t upper = static_cast<uint32_t>((arg1 >> 32) & 0xFFFFFFFF);

    return Invoke3WithReferrer(arg0, lower, upper, code, self, referrer);
#endif
  }

  // Method with 32b arg0, 32b arg1, 64b arg2
  size_t Invoke3UUWithReferrer(uint32_t arg0, uint32_t arg1, uint64_t arg2, uintptr_t code,
                               Thread* self, mirror::ArtMethod* referrer) {
#if defined(__x86_64__) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, referrer);
#else
    // TODO: Needs 4-param invoke.
    return 0;
#endif
  }

 protected:
  size_t fp_result;
};


#if defined(__i386__) || defined(__x86_64__)
extern "C" void art_quick_memcpy(void);
#endif

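// art_quick_memcpy should behave like memcpy: copy exactly the requested
// bytes and leave everything outside the destination window untouched. The
// test copies 10 words out of the middle of a 20-word buffer and checks both
// the copied region and the guard regions around it.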
TEST_F(StubTest, Memcpy) {
#if defined(__i386__) || defined(__x86_64__)
  Thread* self = Thread::Current();

  uint32_t orig[20];
  uint32_t trg[20];
  for (size_t i = 0; i < 20; ++i) {
    orig[i] = i;
    trg[i] = 0;
  }

  Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
          10 * sizeof(uint32_t), reinterpret_cast<uintptr_t>(&art_quick_memcpy), self);

  EXPECT_EQ(orig[0], trg[0]);

  for (size_t i = 1; i < 4; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  for (size_t i = 4; i < 14; ++i) {
    EXPECT_EQ(orig[i], trg[i]);
  }

  for (size_t i = 14; i < 20; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  // TODO: Test overlapping?

#else
  LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_lock_object(void);
#endif

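// art_quick_lock_object should thin-lock an unlocked object, bump the thin
// lock count on recursive locks, and inflate to a fat lock (monitor) when the
// lock word cannot hold more state, e.g. once the identity hash code has been
// installed.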
TEST_F(StubTest, LockObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
          reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
  EXPECT_EQ(lock_after.ThinLockCount(), 0U);  // Thin lock starts count at zero

  for (size_t i = 1; i < kThinLockLoops; ++i) {
    Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
            reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

    // Check we're at lock count i

    LockWord l_inc = obj->GetLockWord(false);
    LockWord::LockState l_inc_state = l_inc.GetState();
    EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
    EXPECT_EQ(l_inc.ThinLockCount(), i);
  }

  // Force a fat lock by running identity hashcode to fill up lock word.
  Handle<mirror::String> obj2(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  obj2->IdentityHashCode();

  Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U,
          reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after2 = obj2->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
  EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));

  // Test done.
#else
  LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


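// Deterministic pseudo-random generator (Park-Miller-style multiplier plus a
// small offset) so the stress test below is reproducible across runs.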
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  uint32_t next() {
    val_ = val_ * 48271 % 2147483647 + 13;
    return val_;
  }

  uint32_t val_;
};


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_lock_object(void);
extern "C" void art_quick_unlock_object(void);
#endif

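// Checks art_quick_unlock_object: unlocking an unlocked object must raise an
// illegal-monitor-state exception, a lock/unlock pair must return the object
// to the unlocked state, and a randomized mix of lock, unlock, and inflate
// operations must keep the lock words and monitor entry counts consistent.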
// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init
  static constexpr size_t kNumberOfLocks = 10;  // Number of objects = number of locks.
  StackHandleScope<kNumberOfLocks + 1> hs(self);
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
  // This should be an illegal monitor state.
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after2 = obj->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);

  LockWord lock_after3 = obj->GetLockWord(false);
  LockWord::LockState new_state3 = lock_after3.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);

  // Stress test:
  // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
  // each step.

  RandGen r(0x1234);

  constexpr size_t kIterations = 10000;  // Number of iterations
  constexpr size_t kMoveToFat = 1000;    // Chance of 1:kMoveToFat to make a lock fat.

  size_t counts[kNumberOfLocks];
  bool fat[kNumberOfLocks];  // Whether a lock should be thin or fat.
  Handle<mirror::String> objects[kNumberOfLocks];

  // Initialize = allocate.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    counts[i] = 0;
    fat[i] = false;
    objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
  }

  for (size_t i = 0; i < kIterations; ++i) {
    // Select which lock to update.
    size_t index = r.next() % kNumberOfLocks;

    // Make lock fat?
    if (!fat[index] && (r.next() % kMoveToFat == 0)) {
      fat[index] = true;
      objects[index]->IdentityHashCode();

      LockWord lock_iter = objects[index]->GetLockWord(false);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (counts[index] == 0) {
        EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
      } else {
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
      }
    } else {
      bool lock;  // Whether to lock or unlock in this step.
      if (counts[index] == 0) {
        lock = true;
      } else if (counts[index] == kThinLockLoops) {
        lock = false;
      } else {
        // Randomly.
        lock = r.next() % 2 == 0;
      }

      if (lock) {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);
        counts[index]++;
      } else {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
        counts[index]--;
      }

      EXPECT_FALSE(self->IsExceptionPending());

      // Check the new state.
      LockWord lock_iter = objects[index]->GetLockWord(true);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (fat[index]) {
        // Abuse MonitorInfo.
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
        MonitorInfo info(objects[index].Get());
        EXPECT_EQ(counts[index], info.entry_count_) << index;
      } else {
        if (counts[index] > 0) {
          EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
          EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
        } else {
          EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
        }
      }
    }
  }

  // Unlock the remaining count times and then check it's unlocked. Then deallocate.
  // Go reverse order to correctly handle Handles.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    size_t index = kNumberOfLocks - 1 - i;
    size_t count = counts[index];
    while (count > 0) {
      test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                    reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
      count--;
    }

    LockWord lock_after4 = objects[index]->GetLockWord(false);
    LockWord::LockState new_state4 = lock_after4.GetState();
    EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
                || LockWord::LockState::kFatLocked == new_state4);
  }

  // Test done.
#else
  LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, UnlockObject) {
  TestUnlockObject(this);
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_check_cast(void);
#endif

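// art_quick_check_cast takes two class arguments and must return silently
// when the second class is assignable to the first (here: String[] to
// Object[]) and raise an exception otherwise.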
TEST_F(StubTest, CheckCast) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  Thread* self = Thread::Current();
  // Find some classes.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
  Handle<mirror::Class> c2(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

  // TODO: Make the following work. But that would require correct managed frames.

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

#else
  LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_aput_obj_with_null_and_bound_check(void);
// Do not check non-checked ones, we'd need handlers and stuff...
#endif

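// art_quick_aput_obj_with_null_and_bound_check implements aput-object with
// null, bounds, and assignability checks: storing a String (or null) into a
// String[] must succeed, while a bad index or a non-assignable element must
// throw.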
TEST_F(StubTest, APutObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<5> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
  Handle<mirror::Class> ca(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  // Build a string array of size 10
  Handle<mirror::ObjectArray<mirror::Object>> array(
      hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));

  // Build a string -> should be assignable
  Handle<mirror::String> str_obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  // Build a generic object -> should fail assigning
  Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));

  // Play with it...

  // 1) Success cases
  // 1.1) Assign str_obj to array[0..3]

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(3));

  // 1.2) Assign null to array[0..3]

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(3));

  // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.

  // 2) Failure cases (str into str[])
  // 2.1) Array = null
  // TODO: Throwing NPE needs actual DEX code

//  Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
//          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
//
//  EXPECT_TRUE(self->IsExceptionPending());
//  self->ClearException();

  // 2.2) Index < 0

  Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
          reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 2.3) Index >= length

  Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 3) Failure cases (obj into str[])

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // Tests done.
#else
  LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

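// The object allocation entrypoints come in three flavors: pAllocObject
// resolves the type from a dex type index and a referrer method, while the
// Resolved and Initialized variants take the class directly. All three must
// return a verifiable instance of the requested class, and must throw an
// OutOfMemoryError when the heap is exhausted.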
TEST_F(StubTest, AllocObject) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            reinterpret_cast<size_t>(c->GetVirtualMethod(0)),  // arbitrary
                            0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObject),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K
    size_t length = 128 * KB / 4;
    while (length > 10) {
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h.Get() == nullptr) {
        self->ClearException();

        // Try a smaller length
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h.Get() != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

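// Same idea for the array allocation entrypoints: pAllocArray resolves the
// array type from a dex type index, pAllocArrayResolved takes the array class
// directly; an absurdly large component count must throw an OutOfMemoryError.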
TEST_F(StubTest, AllocObjectArray) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(self);
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

  // Needed to have a linked method.
  Handle<mirror::Class> c_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());

  // For some reason this does not work, as the type_idx is artificial and outside what the
  // resolved types of c_obj allow...

  if (false) {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0)),  // arbitrary
                            10U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArray),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    EXPECT_EQ(obj->GetLength(), 10);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 10U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArrayResolved),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory.
  {
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr),
                            GB,  // that should fail...
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArrayResolved),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_string_compareto(void);
#endif

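// art_quick_string_compareto only needs to agree with String::CompareTo in
// sign, not in magnitude. The test builds pairs of strings, including copies
// with artificial non-zero offsets into their backing char arrays, and
// compares every pair both ways.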
1212TEST_F(StubTest, StringCompareTo) {
1213 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1214
Andreas Gampe266340d2014-05-02 07:55:24 -07001215#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001216 // TODO: Check the "Unresolved" allocation stubs
1217
1218 Thread* self = Thread::Current();
1219 ScopedObjectAccess soa(self);
1220 // garbage is created during ClassLinker::Init
1221
1222 // Create some strings
1223 // Use array so we can index into it and use a matrix for expected results
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001224 // Setup: The first half is standard. The second half uses a non-zero offset.
1225 // TODO: Shared backing arrays.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001226 static constexpr size_t kBaseStringCount = 7;
1227 const char* c[kBaseStringCount] = { "", "", "a", "aa", "ab", "aac", "aac" , };
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001228
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001229 static constexpr size_t kStringCount = 2 * kBaseStringCount;
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001230
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001231 StackHandleScope<kStringCount> hs(self);
1232 Handle<mirror::String> s[kStringCount];
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001233
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001234 for (size_t i = 0; i < kBaseStringCount; ++i) {
1235 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001236 }
1237
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001238 RandGen r(0x1234);
1239
  for (size_t i = kBaseStringCount; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i - kBaseStringCount]));
    int32_t length = s[i]->GetLength();
    if (length > 1) {
      // Set a random offset and length.
      int32_t new_offset = 1 + (r.next() % (length - 1));
      int32_t rest = length - new_offset - 1;
      int32_t new_length = 1 + (rest > 0 ? r.next() % rest : 0);

      s[i]->SetField32<false>(mirror::String::CountOffset(), new_length);
      s[i]->SetField32<false>(mirror::String::OffsetOffset(), new_offset);
    }
  }

  // TODO: wide characters

  // Matrix of expectations. The first component is the first parameter. Note we only check the
  // sign, not the value: as we are testing random offsets, we have to compute the expectation
  // here and rely on String::CompareTo being correct.
  int32_t expected[kStringCount][kStringCount];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      expected[x][y] = s[x]->CompareTo(s[y].Get());
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      // Test string_compareto x y
      size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
                              reinterpret_cast<size_t>(s[y].Get()), 0U,
                              reinterpret_cast<uintptr_t>(&art_quick_string_compareto), self);

      EXPECT_FALSE(self->IsExceptionPending());

      // The result is a 32b signed integer; pick it out of the register-sized return value.
      union {
        size_t r;
        int32_t i;
      } conv;
      conv.r = result;
      int32_t e = expected[x][y];
      EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x % kBaseStringCount]
          << " y=" << c[y % kBaseStringCount] << " res=" << conv.r;
      EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x % kBaseStringCount]
          << " y=" << c[y % kBaseStringCount] << " res=" << conv.r;
      EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x % kBaseStringCount]
          << " y=" << c[y % kBaseStringCount] << " res=" << conv.r;
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_set32_static(void);
extern "C" void art_quick_get32_static(void);
#endif

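// Writes each test value through the set32 stub and reads it back through the get32 stub,
// passing the referrer method along so the stubs can resolve the dex field index.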
static void GetSet32Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                           mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  constexpr size_t num_values = 7;
  uint32_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              reinterpret_cast<uintptr_t>(&art_quick_set32_static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           reinterpret_cast<uintptr_t>(&art_quick_get32_static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_set32_instance(void);
extern "C" void art_quick_get32_instance(void);
#endif

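// Instance-field variant of GetSet32Static: values go in through the set32 stub, are
// cross-checked and bumped through the managed ArtField accessors, and come back out through the
// get32 stub.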
static void GetSet32Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  constexpr size_t num_values = 7;
  uint32_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              reinterpret_cast<uintptr_t>(&art_quick_set32_instance),
                              self,
                              referrer);

    int32_t res = f->Get()->GetInt(obj->Get());
    EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;

    res++;
    f->Get()->SetInt<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            reinterpret_cast<uintptr_t>(&art_quick_get32_instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int32_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_set_obj_static(void);
extern "C" void art_quick_get_obj_static(void);

static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
                                 mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                            reinterpret_cast<size_t>(val),
                            0U,
                            reinterpret_cast<uintptr_t>(&art_quick_set_obj_static),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                                         0U, 0U,
                                         reinterpret_cast<uintptr_t>(&art_quick_get_obj_static),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
}
#endif

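// The object helpers store nullptr, then a live string, then nullptr again, so both the null and
// non-null store paths of the object stubs are exercised.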
static void GetSetObjStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                            mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_static((*f)->GetDexFieldIndex(), str, self, referrer, test);

  set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);
#else
  LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_set_obj_instance(void);
extern "C" void art_quick_get_obj_instance(void);

static void set_and_check_instance(Handle<mirror::ArtField>* f, mirror::Object* trg,
                                   mirror::Object* val, Thread* self, mirror::ArtMethod* referrer,
                                   StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                            reinterpret_cast<size_t>(trg),
                            reinterpret_cast<size_t>(val),
                            reinterpret_cast<uintptr_t>(&art_quick_set_obj_instance),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                         reinterpret_cast<size_t>(trg),
                                         0U,
                                         reinterpret_cast<uintptr_t>(&art_quick_get_obj_instance),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;

  // Also check via the managed accessor.
  EXPECT_EQ(val, f->Get()->GetObj(trg));
}
#endif

static void GetSetObjInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                              Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_instance(f, obj->Get(), str, self, referrer, test);

  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
#else
  LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}


// TODO: Complete these tests for 32b architectures.

#if defined(__x86_64__) || defined(__aarch64__)
extern "C" void art_quick_set64_static(void);
extern "C" void art_quick_get64_static(void);
#endif

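// 64-bit variant of GetSet32Static; per the TODO above, only implemented for 64-bit targets so
// far.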
static void GetSet64Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                           mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__x86_64__) || defined(__aarch64__)
  constexpr size_t num_values = 8;
  uint64_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3UWithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                               values[i],
                               reinterpret_cast<uintptr_t>(&art_quick_set64_static),
                               self,
                               referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           reinterpret_cast<uintptr_t>(&art_quick_get64_static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}


#if defined(__x86_64__) || defined(__aarch64__)
extern "C" void art_quick_set64_instance(void);
extern "C" void art_quick_get64_instance(void);
#endif

static void GetSet64Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__x86_64__) || defined(__aarch64__)
  constexpr size_t num_values = 8;
  uint64_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              reinterpret_cast<uintptr_t>(&art_quick_set64_instance),
                              self,
                              referrer);

    int64_t res = f->Get()->GetLong(obj->Get());
    EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;

    res++;
    f->Get()->SetLong<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            reinterpret_cast<uintptr_t>(&art_quick_get64_instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int64_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}

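// Driver for the field tests: allocates an instance of the AllFields test class, then walks its
// static and instance fields and calls the matching Get/Set helper for the primitive type under
// test.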
static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
  // garbage is created during ClassLinker::Init

  JNIEnv* env = Thread::Current()->GetJniEnv();
  jclass jc = env->FindClass("AllFields");
  CHECK(jc != nullptr);
  jobject o = env->AllocObject(jc);
  CHECK(o != nullptr);

  ScopedObjectAccess soa(self);
  StackHandleScope<5> hs(self);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
  Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
  // Need a method as a referrer
  Handle<mirror::ArtMethod> m(hs.NewHandle(c->GetDirectMethod(0)));

  // Play with it...

  // Static fields.
  {
    Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetSFields()));
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      StackHandleScope<1> hs(self);
      Handle<mirror::ArtField> f(hs.NewHandle(fields->Get(i)));

      FieldHelper fh(f.Get());
      Primitive::Type type = fh.GetTypeAsPrimitiveType();
      switch (type) {
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Static(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Static(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Don't try array.
          if (test_type == type && fh.GetTypeDescriptor()[0] != '[') {
            GetSetObjStatic(&obj, &f, self, m.Get(), test);
          }
          break;

        default:
          break;  // Skip.
      }
    }
  }

  // Instance fields.
  {
    Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetIFields()));
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      StackHandleScope<1> hs(self);
      Handle<mirror::ArtField> f(hs.NewHandle(fields->Get(i)));

      FieldHelper fh(f.Get());
      Primitive::Type type = fh.GetTypeAsPrimitiveType();
      switch (type) {
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Instance(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Instance(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Don't try array.
          if (test_type == type && fh.GetTypeDescriptor()[0] != '[') {
            GetSetObjInstance(&obj, &f, self, m.Get(), test);
          }
          break;

        default:
          break;  // Skip.
      }
    }
  }

  // TODO: Deallocate things.
}


TEST_F(StubTest, Fields32) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimInt);
}

TEST_F(StubTest, FieldsObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimNot);
}

TEST_F(StubTest, Fields64) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimLong);
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_imt_conflict_trampoline(void);
#endif

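// Checks interface dispatch through the IMT conflict trampoline: ArrayList's IMT slot for
// List.contains is expected to hold the conflict method, and calling through the trampoline with
// the interface method's dex method index as the hidden argument must still reach
// ArrayList.contains.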
TEST_F(StubTest, IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod =
      env->GetMethodID(arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get mirror representation.
  Handle<mirror::ArtMethod> contains_amethod(hs.NewHandle(soa.DecodeMethod(contains_jmethod)));

  // Patch up ArrayList.contains.
  if (contains_amethod.Get()->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod.Get()->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        GetTlsPtr(self)->quick_entrypoints.pQuickToInterpreterBridge));
  }

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod =
      env->GetMethodID(list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get mirror representation.
  Handle<mirror::ArtMethod> inf_contains(hs.NewHandle(soa.DecodeMethod(inf_contains_jmethod)));

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Sanity check: there must be a conflict for List.contains in ArrayList.

  mirror::Class* arraylist_class = soa.Decode<mirror::Class*>(arraylist_jclass);
  mirror::ArtMethod* m = arraylist_class->GetImTable()->Get(
      inf_contains->GetDexMethodIndex() % ClassLinker::kImtSize);

  if (!m->IsImtConflictMethod()) {
    LOG(WARNING) << "Test is meaningless, no IMT conflict in setup: "
                 << PrettyMethod(m, true);
    LOG(WARNING) << "Please update StubTest.IMT.";
    return;
  }

  // Create instances.

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object*>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(jobj)));

  // Invoke. The empty list must not contain the object.

  size_t result =
      Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   reinterpret_cast<uintptr_t>(&art_quick_imt_conflict_trampoline),
                                   self, contains_amethod.Get(),
                                   static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));

  // Invoke again. Now the list must contain the object.

  result =
      Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   reinterpret_cast<uintptr_t>(&art_quick_imt_conflict_trampoline),
                                   self, contains_amethod.Get(),
                                   static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

}  // namespace art