/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <cstdio>

#include "art_field-inl.h"
#include "class_linker-inl.h"
#include "common_runtime_test.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/string-inl.h"
#include "scoped_thread_state_change.h"

namespace art {


class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(), type);
        }
      }
    }
  }

  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

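  // The Invoke3* helpers below call a quick entrypoint directly from C++: inline assembly
  // moves the arguments into the registers the stub expects, and a ManagedStack fragment is
  // pushed so the runtime sees a valid managed-to-native transition around the call.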
 public:
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             mirror::ArtMethod* referrer) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "subl $12, %%esp\n\t"       // Align stack.
        "pushl %[referrer]\n\t"     // Store referrer.
        "call *%%edi\n\t"           // Call the stub
        "addl $16, %%esp"           // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #20\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "add sp, sp, #20\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop referrer and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"       // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t" // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "str %[self], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldr x18, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"


        "blr x3\n\t"                // Call the stub
        "mov x8, x0\n\t"            // Store result
        "add sp, sp, #16\n\t"       // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"            // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"      // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"       // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t" // Store the FPR comparison result
        "mov %[result], x8\n\t"     // Store the call result

        "b 3f\n\t"                  // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                  // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__) && defined(__clang__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"     // Push referrer
        "pushq (%%rsp)\n\t"         // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"           // Call the stub
        "addq $16, %%rsp\n\t"       // Pop referrer and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "c"(referrer)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    UNUSED(arg0, arg1, arg2, code, referrer);
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

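  // Same as Invoke3WithReferrer, but also threads a "hidden" argument through the extra
  // register a stub may expect (r12 on arm, x17 on arm64, xmm7 on x86 and rax on x86-64
  // below); stubs such as the IMT conflict trampoline, e.g., receive their target this way.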
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, mirror::ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "movd %[hidden], %%xmm7\n\t"
        "subl $12, %%esp\n\t"       // Align stack.
        "pushl %[referrer]\n\t"     // Store referrer
        "call *%%edi\n\t"           // Call the stub
        "addl $16, %%esp"           // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer), [hidden]"m"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop referrer and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"       // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t" // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x18, x17, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"                // Call the stub
        "mov x8, x0\n\t"            // Store result
        "add sp, sp, #16\n\t"       // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"            // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"      // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"       // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t" // Store the FPR comparison result
        "mov %[result], x8\n\t"     // Store the call result

        "b 3f\n\t"                  // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                  // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__) && defined(__clang__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"     // Push referrer
        "pushq (%%rsp)\n\t"         // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rbx\n\t"           // Call the stub
        "addq $16, %%rsp\n\t"       // Pop referrer and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "b"(code), [referrer] "c"(referrer), [hidden] "a"(hidden)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, code into rbx, and hidden into rax
        : "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    UNUSED(arg0, arg1, arg2, code, referrer, hidden);
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Method with 32b arg0, 64b arg1
  size_t Invoke3UWithReferrer(size_t arg0, uint64_t arg1, uintptr_t code, Thread* self,
                              mirror::ArtMethod* referrer) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, 0U, code, self, referrer);
#else
    // Need to split up arguments.
    uint32_t lower = static_cast<uint32_t>(arg1 & 0xFFFFFFFF);
    uint32_t upper = static_cast<uint32_t>((arg1 >> 32) & 0xFFFFFFFF);

    return Invoke3WithReferrer(arg0, lower, upper, code, self, referrer);
#endif
  }

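  // Reads the entrypoint pointer out of the current thread's entrypoint table, i.e. the same
  // pointer compiled managed code would call through. Typical use in the tests below (dst,
  // src and size are illustrative names only, not variables from this file):
  //   const uintptr_t memcpy_entry = StubTest::GetEntrypoint(self, kQuickMemcpy);
  //   Invoke3(dst, src, size, memcpy_entry, self);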
  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
    int32_t offset;
#ifdef __LP64__
    offset = GetThreadOffset<8>(entrypoint).Int32Value();
#else
    offset = GetThreadOffset<4>(entrypoint).Int32Value();
#endif
    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
  }

 protected:
  size_t fp_result;
};


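// Each test below follows the same pattern: resolve a quick entrypoint for the current
// thread, call it directly through the Invoke3* helpers with raw size_t arguments, and
// check the effects, bypassing compiled managed code entirely.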
TEST_F(StubTest, Memcpy) {
#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  uint32_t orig[20];
  uint32_t trg[20];
  for (size_t i = 0; i < 20; ++i) {
    orig[i] = i;
    trg[i] = 0;
  }

  Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
          10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);

  EXPECT_EQ(orig[0], trg[0]);

  for (size_t i = 1; i < 4; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  for (size_t i = 4; i < 14; ++i) {
    EXPECT_EQ(orig[i], trg[i]);
  }

  for (size_t i = 14; i < 20; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  // TODO: Test overlapping?

#else
  LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, LockObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();

  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
  EXPECT_EQ(lock_after.ThinLockCount(), 0U);  // Thin lock starts count at zero

  for (size_t i = 1; i < kThinLockLoops; ++i) {
    Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

    // Check we're at lock count i

    LockWord l_inc = obj->GetLockWord(false);
    LockWord::LockState l_inc_state = l_inc.GetState();
    EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
    EXPECT_EQ(l_inc.ThinLockCount(), i);
  }

  // Force a fat lock by running identity hashcode to fill up lock word.
  Handle<mirror::String> obj2(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  obj2->IdentityHashCode();

  Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after2 = obj2->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
  EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));

  // Test done.
#else
  LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


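// Small deterministic pseudo-random generator (LCG-style) for the lock/unlock stress test
// below, so that a failing run can be reproduced from the fixed seed.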
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  uint32_t next() {
    val_ = val_ * 48271 % 2147483647 + 13;
    return val_;
  }

  uint32_t val_;
};


// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();

  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
  const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init
  static constexpr size_t kNumberOfLocks = 10;  // Number of objects = number of locks.
  StackHandleScope<kNumberOfLocks + 1> hs(self);
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
  // This should be an illegal monitor state.
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after2 = obj->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);

  LockWord lock_after3 = obj->GetLockWord(false);
  LockWord::LockState new_state3 = lock_after3.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);

  // Stress test:
  // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
  // each step.
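  // Invariant: counts[i] tracks how often objects[i] is currently locked and fat[i] whether
  // its lock has been inflated; after every step the lock word must agree with both.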

  RandGen r(0x1234);

  constexpr size_t kIterations = 10000;  // Number of iterations
  constexpr size_t kMoveToFat = 1000;    // Chance of 1:kMoveToFat to make a lock fat.

  size_t counts[kNumberOfLocks];
  bool fat[kNumberOfLocks];  // Whether a lock should be thin or fat.
  Handle<mirror::String> objects[kNumberOfLocks];

  // Initialize = allocate.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    counts[i] = 0;
    fat[i] = false;
    objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
  }

  for (size_t i = 0; i < kIterations; ++i) {
    // Select which lock to update.
    size_t index = r.next() % kNumberOfLocks;

    // Make lock fat?
    if (!fat[index] && (r.next() % kMoveToFat == 0)) {
      fat[index] = true;
      objects[index]->IdentityHashCode();

      LockWord lock_iter = objects[index]->GetLockWord(false);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (counts[index] == 0) {
        EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
      } else {
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
      }
    } else {
      bool take_lock;  // Whether to lock or unlock in this step.
      if (counts[index] == 0) {
        take_lock = true;
      } else if (counts[index] == kThinLockLoops) {
        take_lock = false;
      } else {
        // Randomly.
        take_lock = r.next() % 2 == 0;
      }

      if (take_lock) {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
                      self);
        counts[index]++;
      } else {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      art_quick_unlock_object, self);
        counts[index]--;
      }

      EXPECT_FALSE(self->IsExceptionPending());

      // Check the new state.
      LockWord lock_iter = objects[index]->GetLockWord(true);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (fat[index]) {
        // Abuse MonitorInfo.
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
        MonitorInfo info(objects[index].Get());
        EXPECT_EQ(counts[index], info.entry_count_) << index;
      } else {
        if (counts[index] > 0) {
          EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
          EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
        } else {
          EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
        }
      }
    }
  }

  // Unlock the remaining count times and then check it's unlocked. Then deallocate.
  // Go reverse order to correctly handle Handles.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    size_t index = kNumberOfLocks - 1 - i;
    size_t count = counts[index];
    while (count > 0) {
      test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
                    self);
      count--;
    }

    LockWord lock_after4 = objects[index]->GetLockWord(false);
    LockWord::LockState new_state4 = lock_after4.GetState();
    EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
                || LockWord::LockState::kFatLocked == new_state4);
  }

  // Test done.
#else
  UNUSED(test);
  LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, UnlockObject) {
  // This will lead to monitor error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  TestUnlockObject(this);
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_check_cast(void);
#endif

TEST_F(StubTest, CheckCast) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  const uintptr_t art_quick_check_cast = StubTest::GetEntrypoint(self, kQuickCheckCast);

  // Find some classes.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
  Handle<mirror::Class> c2(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  // TODO: Make the following work. But that would require correct managed frames.

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

#else
  LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


TEST_F(StubTest, APutObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  // Do not check non-checked ones, we'd need handlers and stuff...
  const uintptr_t art_quick_aput_obj_with_null_and_bound_check =
      StubTest::GetEntrypoint(self, kQuickAputObjectWithNullAndBoundCheck);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<5> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
  Handle<mirror::Class> ca(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  // Build a string array of size 10
  Handle<mirror::ObjectArray<mirror::Object>> array(
      hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));

  // Build a string -> should be assignable
  Handle<mirror::String> str_obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  // Build a generic object -> should fail assigning
  Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));

  // Play with it...

  // 1) Success cases
  // 1.1) Assign str_obj to array[0..3]

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(3));

  // 1.2) Assign null to array[0..3]

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(3));

  // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.

  // 2) Failure cases (str into str[])
  // 2.1) Array = null
  // TODO: Throwing NPE needs actual DEX code

//  Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
//          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
//
//  EXPECT_TRUE(self->IsExceptionPending());
//  self->ClearException();

  // 2.2) Index < 0

  Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
          reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 2.3) Index >= length

  Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 3) Failure cases (obj into str[])

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // Tests done.
#else
  LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, AllocObject) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            reinterpret_cast<size_t>(c->GetVirtualMethod(0)),  // arbitrary
                            0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObject),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
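    // Strategy (sketch): let the heap grow to its capacity, fill it first with large object
    // arrays of shrinking size and then with single objects until allocation fails, and only
    // then check that the allocation stub reports OOM instead of crashing.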
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K
    size_t length = 128 * KB / 4;
    while (length > 10) {
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h.Get() == nullptr) {
        self->ClearException();

        // Try a smaller length
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h.Get() != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001123TEST_F(StubTest, AllocObjectArray) {
1124 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1125
Ian Rogersc3ccc102014-06-25 11:52:14 -07001126#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001127 // TODO: Check the "Unresolved" allocation stubs
1128
Andreas Gampe369810a2015-01-14 19:53:31 -08001129 // This will lead to OOM error messages in the log.
1130 ScopedLogSeverity sls(LogSeverity::FATAL);
1131
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001132 Thread* self = Thread::Current();
1133 // Create an object
1134 ScopedObjectAccess soa(self);
1135 // garbage is created during ClassLinker::Init
1136
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001137 StackHandleScope<2> hs(self);
1138 Handle<mirror::Class> c(
1139 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001140
1141 // Needed to have a linked method.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001142 Handle<mirror::Class> c_obj(
1143 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001144
1145 // Play with it...
1146
1147 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001148
1149 // For some reason this does not work, as the type_idx is artificial and outside what the
1150 // resolved types of c_obj allow...
1151
Ian Rogerscf7f1912014-10-22 22:06:39 -07001152 if ((false)) {
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001153 // Use an arbitrary method from c to use as referrer
1154 size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()), // type_idx
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001155 10U,
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001156 reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0)), // arbitrary
Andreas Gampe29b38412014-08-13 00:15:43 -07001157 StubTest::GetEntrypoint(self, kQuickAllocArray),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001158 self);
1159
1160 EXPECT_FALSE(self->IsExceptionPending());
1161 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1162 mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001163 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001164 VerifyObject(obj);
1165 EXPECT_EQ(obj->GetLength(), 10);
1166 }
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001167
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001168 {
1169 // We can use nullptr in the second argument as we do not need a method here (not used in
1170 // resolved/initialized cases)
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001171 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 10U,
1172 reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -07001173 StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001174 self);
Nicolas Geoffray14691c52015-03-05 10:40:17 +00001175 EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001176 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1177 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
1178 EXPECT_TRUE(obj->IsArrayInstance());
1179 EXPECT_TRUE(obj->IsObjectArray());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001180 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001181 VerifyObject(obj);
1182 mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
1183 EXPECT_EQ(array->GetLength(), 10);
1184 }
1185
1186 // Failure tests.
1187
1188 // Out-of-memory.
1189 {
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001190 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001191 GB, // that should fail...
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001192 reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -07001193 StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001194 self);
1195
1196 EXPECT_TRUE(self->IsExceptionPending());
1197 self->ClearException();
1198 EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
1199 }
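
  // The resolved-array stub above follows the same shape as the object stub
  // (a sketch, not the real signature): arg0 = array Class*, arg1 = element
  // count, arg2 = unused method slot. A GB-element request cannot be
  // satisfied, so the stub must return null with an OOME pending rather than
  // crash or silently truncate the size.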

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


TEST_F(StubTest, StringCompareTo) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();

  const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);

  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  const char* c[] = { "", "", "a", "aa", "ab",
      "aacaacaacaacaacaac",  // This one's under the default limit to go to __memcmp16.
      "aacaacaacaacaacaacaacaacaacaacaacaac",  // This one's over.
      "aacaacaacaacaacaacaacaacaacaacaacaaca" };  // As is this one. We need a separate one to
                                                  // defeat object-equal optimizations.
  static constexpr size_t kBaseStringCount = arraysize(c);
  static constexpr size_t kStringCount = 2 * kBaseStringCount;

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kBaseStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
  }

  RandGen r(0x1234);

  for (size_t i = kBaseStringCount; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i - kBaseStringCount]));
    int32_t length = s[i]->GetLength();
    if (length > 1) {
      // Set a random offset and length.
      int32_t new_offset = 1 + (r.next() % (length - 1));
      int32_t rest = length - new_offset - 1;
      int32_t new_length = 1 + (rest > 0 ? r.next() % rest : 0);

      s[i]->SetField32<false>(mirror::String::CountOffset(), new_length);
      s[i]->SetField32<false>(mirror::String::OffsetOffset(), new_offset);
    }
  }

  // TODO: wide characters

  // Matrix of expectations. First component is first parameter. Note we only check against the
  // sign, not the value. As we are testing random offsets, we need to compute this and need to
  // rely on String::CompareTo being correct.
  int32_t expected[kStringCount][kStringCount];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      expected[x][y] = s[x]->CompareTo(s[y].Get());
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      // Test string_compareto x y
      size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
                              reinterpret_cast<size_t>(s[y].Get()), 0U,
                              art_quick_string_compareto, self);

      EXPECT_FALSE(self->IsExceptionPending());

      // The result is a 32b signed integer
      union {
        size_t r;
        int32_t i;
      } conv;
      conv.r = result;
      int32_t e = expected[x][y];
      EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
    }
  }
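
  // What the three EXPECT_TRUEs above encode: java.lang.String.compareTo only
  // specifies the sign of its result, so the stub's value is checked against
  // the reference value by implication rather than by equality:
  //
  //   e == 0  =>  conv.i == 0
  //   e <  0  =>  conv.i <  0
  //   e >  0  =>  conv.i >  0
  //
  // Together the three implications force conv.i to have the same sign as e.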

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
      std::endl;
#endif
}


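// The GetSet helpers below all follow the same pattern: write a value through
// the quick Set{8,16,32,64,Obj}{Static,Instance} stub, then read it back
// through the matching Get stub and/or directly through the ArtField, and
// expect the value to round-trip at the field's width. In shorthand (these
// are not real helpers, just the shape of the Invoke3WithReferrer calls):
//
//   stub_set(field_index, value);
//   EXPECT_EQ(value, stub_get(field_index));
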
static void GetSetBooleanStatic(ArtField* f, Thread* self,
                                mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  constexpr size_t num_values = 5;
  uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet8Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
                                           self,
                                           referrer);
    // Booleans are currently stored as uint8_t, so be zealous and assert that the exact byte
    // written is also the byte read back.
    EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
static void GetSetByteStatic(ArtField* f, Thread* self, mirror::ArtMethod* referrer,
                             StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  int8_t values[] = { -128, -64, 0, 64, 127 };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet8Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetByteStatic),
                                           self,
                                           referrer);
    EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


static void GetSetBooleanInstance(Handle<mirror::Object>* obj, ArtField* f, Thread* self,
                                  mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  uint8_t values[] = { 0, true, 2, 128, 0xFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet8Instance),
                              self,
                              referrer);

    uint8_t res = f->GetBoolean(obj->Get());
    EXPECT_EQ(values[i], res) << "Iteration " << i;

    f->SetBoolean<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint8_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
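
// Note that the instance variants deliberately mix the two access paths: the
// value written by the stub is read back via ArtField, then a value written
// via ArtField is read back through the Get stub. If the stub used a wrong
// offset or width, the two paths would disagree.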
static void GetSetByteInstance(Handle<mirror::Object>* obj, ArtField* f,
                               Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  int8_t values[] = { -128, -64, 0, 64, 127 };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet8Instance),
                              self,
                              referrer);

    int8_t res = f->GetByte(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    f->SetByte<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetByteInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int8_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

static void GetSetCharStatic(ArtField* f, Thread* self, mirror::ArtMethod* referrer,
                             StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet16Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetCharStatic),
                                           self,
                                           referrer);

    EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
static void GetSetShortStatic(ArtField* f, Thread* self,
                              mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet16Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetShortStatic),
                                           self,
                                           referrer);

    EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

static void GetSetCharInstance(Handle<mirror::Object>* obj, ArtField* f,
                               Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    uint16_t res = f->GetChar(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    f->SetChar<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetCharInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint16_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
static void GetSetShortInstance(Handle<mirror::Object>* obj, ArtField* f,
                                Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    int16_t res = f->GetShort(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    f->SetShort<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetShortInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int16_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

static void GetSet32Static(ArtField* f, Thread* self, mirror::ArtMethod* referrer,
                           StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet32Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet32Static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


static void GetSet32Instance(Handle<mirror::Object>* obj, ArtField* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet32Instance),
                              self,
                              referrer);

    int32_t res = f->GetInt(obj->Get());
    EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;

    res++;
    f->SetInt<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGet32Instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int32_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))

static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
                                 mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                            reinterpret_cast<size_t>(val),
                            0U,
                            StubTest::GetEntrypoint(self, kQuickSetObjStatic),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                                         0U, 0U,
                                         StubTest::GetEntrypoint(self, kQuickGetObjStatic),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
}
#endif
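
// Object fields get their own helpers because the value space is references
// rather than integers: the tests round-trip null, then a live String, then
// null again through the SetObj/GetObj stubs (which are expected to cooperate
// with the GC's reference write barriers). The instance variant below
// additionally cross-checks the stub-visible value against ArtField::GetObj.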

static void GetSetObjStatic(ArtField* f, Thread* self, mirror::ArtMethod* referrer,
                            StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_static(f->GetDexFieldIndex(), str, self, referrer, test);

  set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
static void set_and_check_instance(ArtField* f, mirror::Object* trg,
                                   mirror::Object* val, Thread* self, mirror::ArtMethod* referrer,
                                   StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                            reinterpret_cast<size_t>(trg),
                            reinterpret_cast<size_t>(val),
                            StubTest::GetEntrypoint(self, kQuickSetObjInstance),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                         reinterpret_cast<size_t>(trg),
                                         0U,
                                         StubTest::GetEntrypoint(self, kQuickGetObjInstance),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;

  EXPECT_EQ(val, f->GetObj(trg));
}
#endif

static void GetSetObjInstance(Handle<mirror::Object>* obj, ArtField* f,
                              Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_instance(f, obj->Get(), str, self, referrer, test);

  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


// TODO: Complete these tests for 32b architectures.
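// (Presumably because Invoke3WithReferrer funnels arguments and the return
// value through size_t, which is only 32 bits wide on those targets, a 64-bit
// field value cannot round-trip through the helper unchanged; the 64-bit
// helpers below are accordingly compiled only for x86-64 and arm64.)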

static void GetSet64Static(ArtField* f, Thread* self, mirror::ArtMethod* referrer,
                           StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
  uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3UWithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                               values[i],
                               StubTest::GetEntrypoint(self, kQuickSet64Static),
                               self,
                               referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet64Static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


static void GetSet64Instance(Handle<mirror::Object>* obj, ArtField* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
  uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet64Instance),
                              self,
                              referrer);

    int64_t res = f->GetLong(obj->Get());
    EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;

    res++;
    f->SetLong<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGet64Instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int64_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
  // garbage is created during ClassLinker::Init

  JNIEnv* env = Thread::Current()->GetJniEnv();
  jclass jc = env->FindClass("AllFields");
  CHECK(jc != nullptr);
  jobject o = env->AllocObject(jc);
  CHECK(o != nullptr);

  ScopedObjectAccess soa(self);
  StackHandleScope<4> hs(self);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
  Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
  // Need a method as a referrer
  Handle<mirror::ArtMethod> m(hs.NewHandle(c->GetDirectMethod(0)));

  // Play with it...

  // Static fields.
  ArtField* fields = c->GetSFields();
  size_t num_fields = c->NumStaticFields();
  for (size_t i = 0; i < num_fields; ++i) {
    ArtField* f = &fields[i];
    Primitive::Type type = f->GetTypeAsPrimitiveType();
    if (test_type != type) {
      continue;
    }
    switch (type) {
      case Primitive::Type::kPrimBoolean:
        GetSetBooleanStatic(f, self, m.Get(), test);
        break;
      case Primitive::Type::kPrimByte:
        GetSetByteStatic(f, self, m.Get(), test);
        break;
      case Primitive::Type::kPrimChar:
        GetSetCharStatic(f, self, m.Get(), test);
        break;
      case Primitive::Type::kPrimShort:
        GetSetShortStatic(f, self, m.Get(), test);
        break;
      case Primitive::Type::kPrimInt:
        GetSet32Static(f, self, m.Get(), test);
        break;
      case Primitive::Type::kPrimLong:
        GetSet64Static(f, self, m.Get(), test);
        break;
      case Primitive::Type::kPrimNot:
        // Don't try array.
        if (f->GetTypeDescriptor()[0] != '[') {
          GetSetObjStatic(f, self, m.Get(), test);
        }
        break;
      default:
        break;  // Skip.
    }
  }

  // Instance fields.
  fields = c->GetIFields();
  num_fields = c->NumInstanceFields();
  for (size_t i = 0; i < num_fields; ++i) {
    ArtField* f = &fields[i];
    Primitive::Type type = f->GetTypeAsPrimitiveType();
    if (test_type != type) {
      continue;
    }
    switch (type) {
      case Primitive::Type::kPrimBoolean:
        GetSetBooleanInstance(&obj, f, self, m.Get(), test);
        break;
      case Primitive::Type::kPrimByte:
        GetSetByteInstance(&obj, f, self, m.Get(), test);
        break;
      case Primitive::Type::kPrimChar:
        GetSetCharInstance(&obj, f, self, m.Get(), test);
        break;
      case Primitive::Type::kPrimShort:
        GetSetShortInstance(&obj, f, self, m.Get(), test);
        break;
      case Primitive::Type::kPrimInt:
        GetSet32Instance(&obj, f, self, m.Get(), test);
        break;
      case Primitive::Type::kPrimLong:
        GetSet64Instance(&obj, f, self, m.Get(), test);
        break;
      case Primitive::Type::kPrimNot:
        // Don't try array.
        if (f->GetTypeDescriptor()[0] != '[') {
          GetSetObjInstance(&obj, f, self, m.Get(), test);
        }
        break;
      default:
        break;  // Skip.
    }
  }

  // TODO: Deallocate things.
}
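
// The tests below assume the AllFields dex class declares at least one static
// and one instance field of every primitive type plus a reference type. A
// hypothetical sketch of such a class (the real one lives in the test dex
// files; these field names are made up):
//
//   class AllFields {
//     static boolean sZ; static byte sB; static char sC; static short sS;
//     static int sI; static long sJ; static Object sL;
//     boolean iZ; byte iB; char iC; short iS; int iI; long iJ; Object iL;
//   }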

TEST_F(StubTest, Fields8) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimBoolean);
  TestFields(self, this, Primitive::Type::kPrimByte);
}

TEST_F(StubTest, Fields16) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimChar);
  TestFields(self, this, Primitive::Type::kPrimShort);
}

TEST_F(StubTest, Fields32) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimInt);
}

TEST_F(StubTest, FieldsObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimNot);
}

TEST_F(StubTest, Fields64) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimLong);
}

TEST_F(StubTest, IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod = env->GetMethodID(arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get mirror representation.
  Handle<mirror::ArtMethod> contains_amethod(hs.NewHandle(soa.DecodeMethod(contains_jmethod)));

  // Patch up ArrayList.contains.
  if (contains_amethod.Get()->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod.Get()->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
  }

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod = env->GetMethodID(list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get mirror representation.
  Handle<mirror::ArtMethod> inf_contains(hs.NewHandle(soa.DecodeMethod(inf_contains_jmethod)));

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Create instances.

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object*>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(jobj)));

  // Invocation tests.

  // 1. imt_conflict

  // Contains.

  size_t result =
      Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self, contains_amethod.Get(),
                                   static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());

  // Contains.

  result = Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                        reinterpret_cast<size_t>(obj.Get()),
                                        StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                        self, contains_amethod.Get(),
                                        static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // 2. regular interface trampoline

  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(obj.Get()),
                               StubTest::GetEntrypoint(self,
                                   kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod.Get());

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               StubTest::GetEntrypoint(self,
                                   kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod.Get());

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
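
// In the conflict case above, the IMT slot alone cannot identify which
// interface method is meant, so Invoke3WithReferrerAndHidden passes the dex
// method index of List.contains as an extra "hidden" argument from which the
// conflict trampoline resolves the actual ArrayList.contains implementation.
// The second block drives the same calls through the access-checking interface
// trampoline instead, which is keyed directly by that dex method index.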

TEST_F(StubTest, StringIndexOf) {
#if defined(__arm__) || defined(__aarch64__)
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  const char* c_str[] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
  static constexpr size_t kStringCount = arraysize(c_str);
  const char c_char[] = { 'a', 'b', 'c', 'd', 'e' };
  static constexpr size_t kCharCount = arraysize(c_char);

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
  }

  // Matrix of expectations. Dimensions are string, character, and start index (offset by one).
  // The expected values are computed with String::FastIndexOf, so we rely on it being correct.
  static constexpr size_t kMaxLen = 9;
  DCHECK_LE(strlen(c_str[kStringCount-1]), kMaxLen) << "Please fix the indexof test.";

  // Last dimension: start, offset by 1.
  int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
      }
    }
  }
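
  // The start index z - 1 ranges from -1 to kMaxLen + 1, one below and one
  // above any valid position, so the expected values computed above also pin
  // down the clamping behavior required of the stub. Following
  // java.lang.String.indexOf semantics (which FastIndexOf mirrors), e.g. for "a":
  //
  //   FastIndexOf('a', -1) == 0   // negative start is clamped to the front
  //   FastIndexOf('a', 5)  == -1  // start past the end finds nothing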

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        int32_t start = static_cast<int32_t>(z) - 1;

        // Test indexof x y start
        size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
                                StubTest::GetEntrypoint(self, kQuickIndexOf), self);

        EXPECT_FALSE(self->IsExceptionPending());

        // The result is a 32b signed integer
        union {
          size_t r;
          int32_t i;
        } conv;
        conv.r = result;

        EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
            c_char[y] << " @ " << start;
      }
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

}  // namespace art