blob: de7804f7591a8455d0b8b49ac2d547d91635e44d [file] [log] [blame]
Andreas Gampe525cde22014-04-22 15:44:50 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Ian Rogerse63db272014-07-15 15:36:11 -070017#include <cstdio>
18
Mathieu Chartierc7853442015-03-27 14:35:38 -070019#include "art_field-inl.h"
Vladimir Marko3481ba22015-04-13 12:22:36 +010020#include "class_linker-inl.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070021#include "common_runtime_test.h"
Andreas Gampe29b38412014-08-13 00:15:43 -070022#include "entrypoints/quick/quick_entrypoints_enum.h"
Andreas Gampe51f76352014-05-21 08:28:48 -070023#include "mirror/art_method-inl.h"
24#include "mirror/class-inl.h"
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -070025#include "mirror/string-inl.h"
Ian Rogerse63db272014-07-15 15:36:11 -070026#include "scoped_thread_state_change.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070027
28namespace art {
29
30
// Test fixture that invokes ART "quick" entrypoint stubs directly, via hand-written
// per-architecture assembly trampolines. Each trampoline sets up the minimal calling
// convention a stub expects (args in the right registers, referrer pushed on the stack,
// Thread* in the thread register) and returns the stub's result.
// NOTE(review): the supported architectures here are i386, arm, aarch64 and
// x86_64 (non-Apple); other ISAs log a warning and return 0.
class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      // Ensure every callee-save method kind exists; stubs may transition through them.
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(), type);
        }
      }
    }
  }

  // Shrink the heap (helps allocation-failure paths trigger sooner) and force
  // interpreter-only mode so no compiled code interferes with the stubs under test.
  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  // (The generated test class is not a friend of Thread, so expose tlsPtr_ here.)
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  // Convenience wrapper: invoke a stub with three args and no referrer method.
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // Invokes the stub at `code` with three word-sized arguments and `referrer` pushed
  // where a quick frame's method pointer would live. On aarch64, additionally garbles
  // the callee-saved FP registers d8-d15 before the call and verifies the stub
  // preserved them; the outcome lands in fp_result (0 == preserved).
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             mirror::ArtMethod* referrer) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "subl $12, %%esp\n\t"       // Align stack.
        "pushl %[referrer]\n\t"     // Store referrer.
        "call *%%edi\n\t"           // Call the stub
        "addl $16, %%esp"           // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    // but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #20\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"     // r9 holds Thread* for the stub.
        "add sp, sp, #20\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop null and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"       // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t" // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "str %[self], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldr x18, [sp, #32]\n\t"    // Thread* goes into x18 (thread register — confirm for this ABI).
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"


        "blr x3\n\t"                // Call the stub
        "mov x8, x0\n\t"            // Store result
        "add sp, sp, #16\n\t"       // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"            // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"      // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"       // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t" // Store the FPR comparison result
        "mov %[result], x8\n\t"     // Store the call result

        "b 3f\n\t"                  // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                  // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__) && defined(__clang__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"              // Call the stub
        "addq $16, %%rsp\n\t"          // Pop null and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "c"(referrer)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    UNUSED(arg0, arg1, arg2, code, referrer);
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    // Publish the FPR-preservation outcome; non-aarch64 paths leave it at 0.
    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Same as Invoke3WithReferrer, but also passes a "hidden" argument in the
  // register the per-architecture convention uses for it (e.g. r12 on arm,
  // x17 on aarch64, rax on x86-64, xmm7 on x86 — see the asm below).
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, mirror::ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "movd %[hidden], %%xmm7\n\t"  // Hidden arg travels in xmm7 on x86.
        "subl $12, %%esp\n\t"       // Align stack.
        "pushl %[referrer]\n\t"     // Store referrer
        "call *%%edi\n\t"           // Call the stub
        "addl $16, %%esp"           // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer), [hidden]"m"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    // but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"     // Thread* in r9.
        "ldr r12, [sp, #20]\n\t"    // Hidden arg in r12.
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop null and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"       // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t" // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x18, x17, [sp, #32]\n\t"  // Thread* -> x18, hidden arg -> x17.
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"                // Call the stub
        "mov x8, x0\n\t"            // Store result
        "add sp, sp, #16\n\t"       // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"            // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"      // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"       // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t" // Store the FPR comparison result
        "mov %[result], x8\n\t"     // Store the call result

        "b 3f\n\t"                  // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                  // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__) && defined(__clang__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rbx\n\t"              // Call the stub
        "addq $16, %%rsp\n\t"          // Pop null and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "b"(code), [referrer] "c"(referrer), [hidden] "a"(hidden)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    UNUSED(arg0, arg1, arg2, code, referrer, hidden);
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    // Publish the FPR-preservation outcome; non-aarch64 paths leave it at 0.
    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Method with 32b arg0, 64b arg1
  // On 64-bit targets the wide arg fits in one register; on 32-bit targets it is
  // split into low/high words passed as arg1/arg2.
  size_t Invoke3UWithReferrer(size_t arg0, uint64_t arg1, uintptr_t code, Thread* self,
                              mirror::ArtMethod* referrer) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, 0U, code, self, referrer);
#else
    // Need to split up arguments.
    uint32_t lower = static_cast<uint32_t>(arg1 & 0xFFFFFFFF);
    uint32_t upper = static_cast<uint32_t>((arg1 >> 32) & 0xFFFFFFFF);

    return Invoke3WithReferrer(arg0, lower, upper, code, self, referrer);
#endif
  }

  // Reads the raw code pointer for `entrypoint` out of the Thread's quick
  // entrypoint table, using the pointer-size-appropriate thread offset.
  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
    int32_t offset;
#ifdef __LP64__
    offset = GetThreadOffset<8>(entrypoint).Int32Value();
#else
    offset = GetThreadOffset<4>(entrypoint).Int32Value();
#endif
    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
  }

 protected:
  // Result of the aarch64 d8-d15 preservation check from the last Invoke3* call;
  // 0 means all callee-saved FP registers survived the stub call.
  size_t fp_result;
};
549
550
// Exercises the art_quick_memcpy stub: copies a 10-word window out of the middle
// of a 20-word source into the middle of a zeroed destination and checks that
// exactly that window (and nothing outside it) was copied.
TEST_F(StubTest, Memcpy) {
#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  uint32_t orig[20];
  uint32_t trg[20];
  for (size_t i = 0; i < 20; ++i) {
    orig[i] = i;   // Source holds its own index...
    trg[i] = 0;    // ...destination starts zeroed.
  }

  // Copy orig[4..13] over trg[4..13] (10 words) via the stub.
  Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
          10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);

  // Index 0 compares equal only because both sides are 0 there.
  EXPECT_EQ(orig[0], trg[0]);

  // Before the window: untouched, so trg stays 0 != orig[i].
  for (size_t i = 1; i < 4; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  // Inside the window: copied verbatim.
  for (size_t i = 4; i < 14; ++i) {
    EXPECT_EQ(orig[i], trg[i]);
  }

  // After the window: untouched again.
  for (size_t i = 14; i < 20; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  // TODO: Test overlapping?

#else
  LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
587
// Exercises the art_quick_lock_object stub: first-time lock must produce a thin
// lock with count 0, repeated locking must bump the recursion count, and locking
// an object whose lock word already holds an identity hashcode must inflate to a
// fat lock backed by a Monitor.
TEST_F(StubTest, LockObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();

  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  // First lock: unlocked -> thin-locked.
  Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
  EXPECT_EQ(lock_after.ThinLockCount(), 0U);  // Thin lock starts count at zero

  // Recursive locks: the thin-lock count must track the number of re-entries.
  for (size_t i = 1; i < kThinLockLoops; ++i) {
    Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

    // Check we're at lock count i

    LockWord l_inc = obj->GetLockWord(false);
    LockWord::LockState l_inc_state = l_inc.GetState();
    EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
    EXPECT_EQ(l_inc.ThinLockCount(), i);
  }

  // Force a fat lock by running identity hashcode to fill up lock word.
  Handle<mirror::String> obj2(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  obj2->IdentityHashCode();

  Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after2 = obj2->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
  EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));

  // Test done.
#else
  LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
645
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700646
// Small deterministic pseudo-random generator used by the lock stress tests.
// Deliberately trivial: the recurrence relies on ordinary uint32_t wrap-around,
// so the sequence is fully reproducible for a given seed.
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  // Advances the generator and returns the new state.
  uint32_t next() {
    const uint32_t multiplier = 48271;    // Lehmer/MINSTD-style multiplier.
    const uint32_t modulus = 2147483647;  // 2^31 - 1.
    const uint32_t bump = 13;
    val_ = val_ * multiplier % modulus + bump;
    return val_;
  }

  // Current state; public so tests can seed-and-peek if needed.
  uint32_t val_;
};
658
659
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700660// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
661static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
Ian Rogersc3ccc102014-06-25 11:52:14 -0700662#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700663 static constexpr size_t kThinLockLoops = 100;
664
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700665 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700666
667 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
668 const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700669 // Create an object
670 ScopedObjectAccess soa(self);
671 // garbage is created during ClassLinker::Init
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700672 static constexpr size_t kNumberOfLocks = 10; // Number of objects = lock
673 StackHandleScope<kNumberOfLocks + 1> hs(self);
674 Handle<mirror::String> obj(
675 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700676 LockWord lock = obj->GetLockWord(false);
677 LockWord::LockState old_state = lock.GetState();
678 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
679
Andreas Gampe29b38412014-08-13 00:15:43 -0700680 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700681 // This should be an illegal monitor state.
682 EXPECT_TRUE(self->IsExceptionPending());
683 self->ClearException();
684
685 LockWord lock_after = obj->GetLockWord(false);
686 LockWord::LockState new_state = lock_after.GetState();
687 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);
Andreas Gampe525cde22014-04-22 15:44:50 -0700688
Andreas Gampe29b38412014-08-13 00:15:43 -0700689 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700690
691 LockWord lock_after2 = obj->GetLockWord(false);
692 LockWord::LockState new_state2 = lock_after2.GetState();
693 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);
694
Andreas Gampe29b38412014-08-13 00:15:43 -0700695 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700696
697 LockWord lock_after3 = obj->GetLockWord(false);
698 LockWord::LockState new_state3 = lock_after3.GetState();
699 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);
700
701 // Stress test:
702 // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
703 // each step.
704
705 RandGen r(0x1234);
706
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700707 constexpr size_t kIterations = 10000; // Number of iterations
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700708 constexpr size_t kMoveToFat = 1000; // Chance of 1:kMoveFat to make a lock fat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700709
710 size_t counts[kNumberOfLocks];
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700711 bool fat[kNumberOfLocks]; // Whether a lock should be thin or fat.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700712 Handle<mirror::String> objects[kNumberOfLocks];
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700713
714 // Initialize = allocate.
715 for (size_t i = 0; i < kNumberOfLocks; ++i) {
716 counts[i] = 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700717 fat[i] = false;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700718 objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700719 }
720
721 for (size_t i = 0; i < kIterations; ++i) {
722 // Select which lock to update.
723 size_t index = r.next() % kNumberOfLocks;
724
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700725 // Make lock fat?
726 if (!fat[index] && (r.next() % kMoveToFat == 0)) {
727 fat[index] = true;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700728 objects[index]->IdentityHashCode();
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700729
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700730 LockWord lock_iter = objects[index]->GetLockWord(false);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700731 LockWord::LockState iter_state = lock_iter.GetState();
732 if (counts[index] == 0) {
733 EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
734 } else {
735 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
736 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700737 } else {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800738 bool take_lock; // Whether to lock or unlock in this step.
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700739 if (counts[index] == 0) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800740 take_lock = true;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700741 } else if (counts[index] == kThinLockLoops) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800742 take_lock = false;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700743 } else {
744 // Randomly.
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800745 take_lock = r.next() % 2 == 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700746 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700747
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800748 if (take_lock) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700749 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
750 self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700751 counts[index]++;
752 } else {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700753 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700754 art_quick_unlock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700755 counts[index]--;
756 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700757
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700758 EXPECT_FALSE(self->IsExceptionPending());
759
760 // Check the new state.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700761 LockWord lock_iter = objects[index]->GetLockWord(true);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700762 LockWord::LockState iter_state = lock_iter.GetState();
763 if (fat[index]) {
764 // Abuse MonitorInfo.
765 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700766 MonitorInfo info(objects[index].Get());
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700767 EXPECT_EQ(counts[index], info.entry_count_) << index;
768 } else {
769 if (counts[index] > 0) {
770 EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
771 EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
772 } else {
773 EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
774 }
775 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700776 }
777 }
778
779 // Unlock the remaining count times and then check it's unlocked. Then deallocate.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700780 // Go reverse order to correctly handle Handles.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700781 for (size_t i = 0; i < kNumberOfLocks; ++i) {
782 size_t index = kNumberOfLocks - 1 - i;
783 size_t count = counts[index];
784 while (count > 0) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700785 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
786 self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700787 count--;
788 }
789
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700790 LockWord lock_after4 = objects[index]->GetLockWord(false);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700791 LockWord::LockState new_state4 = lock_after4.GetState();
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700792 EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
793 || LockWord::LockState::kFatLocked == new_state4);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700794 }
795
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700796 // Test done.
Andreas Gampe525cde22014-04-22 15:44:50 -0700797#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -0800798 UNUSED(test);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700799 LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
Andreas Gampe525cde22014-04-22 15:44:50 -0700800 // Force-print to std::cout so it's also outside the logcat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700801 std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe525cde22014-04-22 15:44:50 -0700802#endif
803}
804
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700805TEST_F(StubTest, UnlockObject) {
Andreas Gampe369810a2015-01-14 19:53:31 -0800806 // This will lead to monitor error messages in the log.
807 ScopedLogSeverity sls(LogSeverity::FATAL);
808
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700809 TestUnlockObject(this);
810}
Andreas Gampe525cde22014-04-22 15:44:50 -0700811
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
// NOTE(review): this forward declaration appears unused -- the CheckCast test
// below obtains the stub address via StubTest::GetEntrypoint(self, kQuickCheckCast)
// and shadows this name with a local uintptr_t. Consider removing.
extern "C" void art_quick_check_cast(void);
#endif
815
816TEST_F(StubTest, CheckCast) {
Ian Rogersc3ccc102014-06-25 11:52:14 -0700817#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700818 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700819
820 const uintptr_t art_quick_check_cast = StubTest::GetEntrypoint(self, kQuickCheckCast);
821
Andreas Gampe525cde22014-04-22 15:44:50 -0700822 // Find some classes.
823 ScopedObjectAccess soa(self);
824 // garbage is created during ClassLinker::Init
825
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700826 StackHandleScope<2> hs(soa.Self());
827 Handle<mirror::Class> c(
828 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
829 Handle<mirror::Class> c2(
830 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700831
832 EXPECT_FALSE(self->IsExceptionPending());
833
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700834 Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700835 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700836
837 EXPECT_FALSE(self->IsExceptionPending());
838
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700839 Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700840 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700841
842 EXPECT_FALSE(self->IsExceptionPending());
843
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700844 Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700845 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700846
847 EXPECT_FALSE(self->IsExceptionPending());
848
849 // TODO: Make the following work. But that would require correct managed frames.
850
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700851 Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700852 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700853
854 EXPECT_TRUE(self->IsExceptionPending());
855 self->ClearException();
856
857#else
858 LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
859 // Force-print to std::cout so it's also outside the logcat.
860 std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
861#endif
862}
863
864
Andreas Gampe525cde22014-04-22 15:44:50 -0700865TEST_F(StubTest, APutObj) {
Hiroshi Yamauchid6881ae2014-04-28 17:21:48 -0700866 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
867
Ian Rogersc3ccc102014-06-25 11:52:14 -0700868#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700869 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700870
871 // Do not check non-checked ones, we'd need handlers and stuff...
872 const uintptr_t art_quick_aput_obj_with_null_and_bound_check =
873 StubTest::GetEntrypoint(self, kQuickAputObjectWithNullAndBoundCheck);
874
Andreas Gampe525cde22014-04-22 15:44:50 -0700875 // Create an object
876 ScopedObjectAccess soa(self);
877 // garbage is created during ClassLinker::Init
878
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700879 StackHandleScope<5> hs(soa.Self());
880 Handle<mirror::Class> c(
881 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
882 Handle<mirror::Class> ca(
883 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700884
885 // Build a string array of size 1
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700886 Handle<mirror::ObjectArray<mirror::Object>> array(
887 hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));
Andreas Gampe525cde22014-04-22 15:44:50 -0700888
889 // Build a string -> should be assignable
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700890 Handle<mirror::String> str_obj(
891 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700892
893 // Build a generic object -> should fail assigning
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700894 Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));
Andreas Gampe525cde22014-04-22 15:44:50 -0700895
896 // Play with it...
897
898 // 1) Success cases
Andreas Gampef4e910b2014-04-29 16:55:52 -0700899 // 1.1) Assign str_obj to array[0..3]
Andreas Gampe525cde22014-04-22 15:44:50 -0700900
901 EXPECT_FALSE(self->IsExceptionPending());
902
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700903 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700904 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700905
906 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700907 EXPECT_EQ(str_obj.Get(), array->Get(0));
Andreas Gampe525cde22014-04-22 15:44:50 -0700908
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700909 Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700910 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700911
912 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700913 EXPECT_EQ(str_obj.Get(), array->Get(1));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700914
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700915 Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700916 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700917
918 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700919 EXPECT_EQ(str_obj.Get(), array->Get(2));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700920
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700921 Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700922 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700923
924 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700925 EXPECT_EQ(str_obj.Get(), array->Get(3));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700926
927 // 1.2) Assign null to array[0..3]
Andreas Gampe525cde22014-04-22 15:44:50 -0700928
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700929 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700930 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700931
932 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampef4e910b2014-04-29 16:55:52 -0700933 EXPECT_EQ(nullptr, array->Get(0));
934
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700935 Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700936 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700937
938 EXPECT_FALSE(self->IsExceptionPending());
939 EXPECT_EQ(nullptr, array->Get(1));
940
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700941 Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700942 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700943
944 EXPECT_FALSE(self->IsExceptionPending());
945 EXPECT_EQ(nullptr, array->Get(2));
946
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700947 Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700948 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700949
950 EXPECT_FALSE(self->IsExceptionPending());
951 EXPECT_EQ(nullptr, array->Get(3));
Andreas Gampe525cde22014-04-22 15:44:50 -0700952
953 // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.
954
955 // 2) Failure cases (str into str[])
956 // 2.1) Array = null
957 // TODO: Throwing NPE needs actual DEX code
958
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700959// Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe525cde22014-04-22 15:44:50 -0700960// reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
961//
962// EXPECT_TRUE(self->IsExceptionPending());
963// self->ClearException();
964
965 // 2.2) Index < 0
966
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700967 Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
968 reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700969 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700970
971 EXPECT_TRUE(self->IsExceptionPending());
972 self->ClearException();
973
974 // 2.3) Index > 0
975
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700976 Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700977 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700978
979 EXPECT_TRUE(self->IsExceptionPending());
980 self->ClearException();
981
982 // 3) Failure cases (obj into str[])
983
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700984 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700985 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700986
987 EXPECT_TRUE(self->IsExceptionPending());
988 self->ClearException();
989
990 // Tests done.
991#else
992 LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
993 // Force-print to std::cout so it's also outside the logcat.
994 std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
995#endif
996}
997
TEST_F(StubTest, AllocObject) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // kQuickAllocObject is invoked with a dex type index plus a referrer method.
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            reinterpret_cast<size_t>(c->GetVirtualMethod(0)),  // arbitrary
                            0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObject),
                            self);

    // A successful allocation returns a non-null object of exactly class c.
    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // kQuickAllocObjectResolved takes the class pointer directly.
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // Same for kQuickAllocObjectInitialized.
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
    // Cap the heap so we can exhaust it deterministically below.
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K
    size_t length = 128 * KB / 4;
    while (length > 10) {
      // Keep allocating arrays, shrinking the request whenever allocation fails,
      // until only small requests remain possible.
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h.Get() == nullptr) {
        self->ClearException();

        // Try a smaller length
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h.Get() != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    // With the heap exhausted, the stub is expected to raise an exception and
    // return null.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1122
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001123TEST_F(StubTest, AllocObjectArray) {
1124 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1125
Ian Rogersc3ccc102014-06-25 11:52:14 -07001126#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001127 // TODO: Check the "Unresolved" allocation stubs
1128
Andreas Gampe369810a2015-01-14 19:53:31 -08001129 // This will lead to OOM error messages in the log.
1130 ScopedLogSeverity sls(LogSeverity::FATAL);
1131
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001132 Thread* self = Thread::Current();
1133 // Create an object
1134 ScopedObjectAccess soa(self);
1135 // garbage is created during ClassLinker::Init
1136
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001137 StackHandleScope<2> hs(self);
1138 Handle<mirror::Class> c(
1139 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001140
1141 // Needed to have a linked method.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001142 Handle<mirror::Class> c_obj(
1143 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001144
1145 // Play with it...
1146
1147 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001148
1149 // For some reason this does not work, as the type_idx is artificial and outside what the
1150 // resolved types of c_obj allow...
1151
Ian Rogerscf7f1912014-10-22 22:06:39 -07001152 if ((false)) {
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001153 // Use an arbitrary method from c to use as referrer
1154 size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()), // type_idx
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001155 10U,
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001156 reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0)), // arbitrary
Andreas Gampe29b38412014-08-13 00:15:43 -07001157 StubTest::GetEntrypoint(self, kQuickAllocArray),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001158 self);
1159
1160 EXPECT_FALSE(self->IsExceptionPending());
1161 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1162 mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001163 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001164 VerifyObject(obj);
1165 EXPECT_EQ(obj->GetLength(), 10);
1166 }
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001167
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001168 {
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001169 // We can use null in the second argument as we do not need a method here (not used in
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001170 // resolved/initialized cases)
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001171 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 10U,
1172 reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -07001173 StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001174 self);
Nicolas Geoffray14691c52015-03-05 10:40:17 +00001175 EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001176 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1177 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
1178 EXPECT_TRUE(obj->IsArrayInstance());
1179 EXPECT_TRUE(obj->IsObjectArray());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001180 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001181 VerifyObject(obj);
1182 mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
1183 EXPECT_EQ(array->GetLength(), 10);
1184 }
1185
1186 // Failure tests.
1187
1188 // Out-of-memory.
1189 {
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001190 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001191 GB, // that should fail...
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001192 reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -07001193 StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001194 self);
1195
1196 EXPECT_TRUE(self->IsExceptionPending());
1197 self->ClearException();
1198 EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
1199 }
1200
1201 // Tests done.
1202#else
1203 LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
1204 // Force-print to std::cout so it's also outside the logcat.
1205 std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
1206#endif
1207}
1208
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001209
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001210TEST_F(StubTest, StringCompareTo) {
1211 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1212
Ian Rogersc3ccc102014-06-25 11:52:14 -07001213#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001214 // TODO: Check the "Unresolved" allocation stubs
1215
1216 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -07001217
1218 const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);
1219
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001220 ScopedObjectAccess soa(self);
1221 // garbage is created during ClassLinker::Init
1222
1223 // Create some strings
1224 // Use array so we can index into it and use a matrix for expected results
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001225 // Setup: The first half is standard. The second half uses a non-zero offset.
1226 // TODO: Shared backing arrays.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001227 const char* c[] = { "", "", "a", "aa", "ab",
Serban Constantinescu86797a72014-06-19 16:17:56 +01001228 "aacaacaacaacaacaac", // This one's under the default limit to go to __memcmp16.
1229 "aacaacaacaacaacaacaacaacaacaacaacaac", // This one's over.
1230 "aacaacaacaacaacaacaacaacaacaacaacaaca" }; // As is this one. We need a separate one to
1231 // defeat object-equal optimizations.
Jeff Hao848f70a2014-01-15 13:49:50 -08001232 static constexpr size_t kStringCount = arraysize(c);
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001233
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001234 StackHandleScope<kStringCount> hs(self);
1235 Handle<mirror::String> s[kStringCount];
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001236
Jeff Hao848f70a2014-01-15 13:49:50 -08001237 for (size_t i = 0; i < kStringCount; ++i) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001238 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001239 }
1240
1241 // TODO: wide characters
1242
1243 // Matrix of expectations. First component is first parameter. Note we only check against the
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001244 // sign, not the value. As we are testing random offsets, we need to compute this and need to
1245 // rely on String::CompareTo being correct.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001246 int32_t expected[kStringCount][kStringCount];
1247 for (size_t x = 0; x < kStringCount; ++x) {
1248 for (size_t y = 0; y < kStringCount; ++y) {
1249 expected[x][y] = s[x]->CompareTo(s[y].Get());
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001250 }
1251 }
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001252
1253 // Play with it...
1254
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001255 for (size_t x = 0; x < kStringCount; ++x) {
1256 for (size_t y = 0; y < kStringCount; ++y) {
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001257 // Test string_compareto x y
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001258 size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
1259 reinterpret_cast<size_t>(s[y].Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001260 art_quick_string_compareto, self);
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001261
1262 EXPECT_FALSE(self->IsExceptionPending());
1263
1264 // The result is a 32b signed integer
1265 union {
1266 size_t r;
1267 int32_t i;
1268 } conv;
1269 conv.r = result;
1270 int32_t e = expected[x][y];
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001271 EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1272 conv.r;
1273 EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1274 conv.r;
1275 EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1276 conv.r;
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001277 }
1278 }
1279
Andreas Gampe7177d7c2014-05-02 12:10:02 -07001280 // TODO: Deallocate things.
1281
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001282 // Tests done.
1283#else
1284 LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
1285 // Force-print to std::cout so it's also outside the logcat.
1286 std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
1287 std::endl;
1288#endif
1289}
1290
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001291
Mathieu Chartierc7853442015-03-27 14:35:38 -07001292static void GetSetBooleanStatic(ArtField* f, Thread* self,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001293 mirror::ArtMethod* referrer, StubTest* test)
Fred Shih37f05ef2014-07-16 18:38:08 -07001294 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1295#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
1296 constexpr size_t num_values = 5;
1297 uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };
1298
1299 for (size_t i = 0; i < num_values; ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001300 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001301 static_cast<size_t>(values[i]),
1302 0U,
1303 StubTest::GetEntrypoint(self, kQuickSet8Static),
1304 self,
1305 referrer);
1306
Mathieu Chartierc7853442015-03-27 14:35:38 -07001307 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001308 0U, 0U,
1309 StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
1310 self,
1311 referrer);
1312 // Boolean currently stores bools as uint8_t, be more zealous about asserting correct writes/gets.
1313 EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
1314 }
1315#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001316 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001317 LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
1318 // Force-print to std::cout so it's also outside the logcat.
1319 std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1320#endif
1321}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001322static void GetSetByteStatic(ArtField* f, Thread* self, mirror::ArtMethod* referrer,
1323 StubTest* test)
Fred Shih37f05ef2014-07-16 18:38:08 -07001324 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1325#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001326 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001327
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001328 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001329 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001330 static_cast<size_t>(values[i]),
1331 0U,
1332 StubTest::GetEntrypoint(self, kQuickSet8Static),
1333 self,
1334 referrer);
1335
Mathieu Chartierc7853442015-03-27 14:35:38 -07001336 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001337 0U, 0U,
1338 StubTest::GetEntrypoint(self, kQuickGetByteStatic),
1339 self,
1340 referrer);
1341 EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
1342 }
1343#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001344 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001345 LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
1346 // Force-print to std::cout so it's also outside the logcat.
1347 std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1348#endif
1349}
1350
1351
Mathieu Chartierc7853442015-03-27 14:35:38 -07001352static void GetSetBooleanInstance(Handle<mirror::Object>* obj, ArtField* f, Thread* self,
1353 mirror::ArtMethod* referrer, StubTest* test)
Fred Shih37f05ef2014-07-16 18:38:08 -07001354 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1355#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001356 uint8_t values[] = { 0, true, 2, 128, 0xFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001357
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001358 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001359 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001360 reinterpret_cast<size_t>(obj->Get()),
1361 static_cast<size_t>(values[i]),
1362 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1363 self,
1364 referrer);
1365
Mathieu Chartierc7853442015-03-27 14:35:38 -07001366 uint8_t res = f->GetBoolean(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001367 EXPECT_EQ(values[i], res) << "Iteration " << i;
1368
Mathieu Chartierc7853442015-03-27 14:35:38 -07001369 f->SetBoolean<false>(obj->Get(), res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001370
Mathieu Chartierc7853442015-03-27 14:35:38 -07001371 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001372 reinterpret_cast<size_t>(obj->Get()),
1373 0U,
1374 StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
1375 self,
1376 referrer);
1377 EXPECT_EQ(res, static_cast<uint8_t>(res2));
1378 }
1379#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001380 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001381 LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
1382 // Force-print to std::cout so it's also outside the logcat.
1383 std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1384#endif
1385}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001386static void GetSetByteInstance(Handle<mirror::Object>* obj, ArtField* f,
Fred Shih37f05ef2014-07-16 18:38:08 -07001387 Thread* self, mirror::ArtMethod* referrer, StubTest* test)
1388 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1389#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001390 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001391
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001392 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001393 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001394 reinterpret_cast<size_t>(obj->Get()),
1395 static_cast<size_t>(values[i]),
1396 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1397 self,
1398 referrer);
1399
Mathieu Chartierc7853442015-03-27 14:35:38 -07001400 int8_t res = f->GetByte(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001401 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001402 f->SetByte<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001403
Mathieu Chartierc7853442015-03-27 14:35:38 -07001404 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001405 reinterpret_cast<size_t>(obj->Get()),
1406 0U,
1407 StubTest::GetEntrypoint(self, kQuickGetByteInstance),
1408 self,
1409 referrer);
1410 EXPECT_EQ(res, static_cast<int8_t>(res2));
1411 }
1412#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001413 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001414 LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
1415 // Force-print to std::cout so it's also outside the logcat.
1416 std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1417#endif
1418}
1419
Mathieu Chartierc7853442015-03-27 14:35:38 -07001420static void GetSetCharStatic(ArtField* f, Thread* self, mirror::ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001421 StubTest* test)
Fred Shih37f05ef2014-07-16 18:38:08 -07001422 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1423#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001424 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001425
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001426 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001427 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001428 static_cast<size_t>(values[i]),
1429 0U,
1430 StubTest::GetEntrypoint(self, kQuickSet16Static),
1431 self,
1432 referrer);
1433
Mathieu Chartierc7853442015-03-27 14:35:38 -07001434 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001435 0U, 0U,
1436 StubTest::GetEntrypoint(self, kQuickGetCharStatic),
1437 self,
1438 referrer);
1439
1440 EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
1441 }
1442#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001443 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001444 LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
1445 // Force-print to std::cout so it's also outside the logcat.
1446 std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1447#endif
1448}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001449static void GetSetShortStatic(ArtField* f, Thread* self,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001450 mirror::ArtMethod* referrer, StubTest* test)
Fred Shih37f05ef2014-07-16 18:38:08 -07001451 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1452#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001453 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001454
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001455 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001456 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001457 static_cast<size_t>(values[i]),
1458 0U,
1459 StubTest::GetEntrypoint(self, kQuickSet16Static),
1460 self,
1461 referrer);
1462
Mathieu Chartierc7853442015-03-27 14:35:38 -07001463 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001464 0U, 0U,
1465 StubTest::GetEntrypoint(self, kQuickGetShortStatic),
1466 self,
1467 referrer);
1468
1469 EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
1470 }
1471#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001472 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001473 LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
1474 // Force-print to std::cout so it's also outside the logcat.
1475 std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1476#endif
1477}
1478
Mathieu Chartierc7853442015-03-27 14:35:38 -07001479static void GetSetCharInstance(Handle<mirror::Object>* obj, ArtField* f,
1480 Thread* self, mirror::ArtMethod* referrer, StubTest* test)
Fred Shih37f05ef2014-07-16 18:38:08 -07001481 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1482#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001483 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001484
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001485 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001486 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001487 reinterpret_cast<size_t>(obj->Get()),
1488 static_cast<size_t>(values[i]),
1489 StubTest::GetEntrypoint(self, kQuickSet16Instance),
1490 self,
1491 referrer);
1492
Mathieu Chartierc7853442015-03-27 14:35:38 -07001493 uint16_t res = f->GetChar(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001494 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001495 f->SetChar<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001496
Mathieu Chartierc7853442015-03-27 14:35:38 -07001497 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001498 reinterpret_cast<size_t>(obj->Get()),
1499 0U,
1500 StubTest::GetEntrypoint(self, kQuickGetCharInstance),
1501 self,
1502 referrer);
1503 EXPECT_EQ(res, static_cast<uint16_t>(res2));
1504 }
1505#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001506 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001507 LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
1508 // Force-print to std::cout so it's also outside the logcat.
1509 std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1510#endif
1511}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001512static void GetSetShortInstance(Handle<mirror::Object>* obj, ArtField* f,
Fred Shih37f05ef2014-07-16 18:38:08 -07001513 Thread* self, mirror::ArtMethod* referrer, StubTest* test)
1514 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1515#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001516 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001517
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001518 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001519 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001520 reinterpret_cast<size_t>(obj->Get()),
1521 static_cast<size_t>(values[i]),
1522 StubTest::GetEntrypoint(self, kQuickSet16Instance),
1523 self,
1524 referrer);
1525
Mathieu Chartierc7853442015-03-27 14:35:38 -07001526 int16_t res = f->GetShort(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001527 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001528 f->SetShort<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001529
Mathieu Chartierc7853442015-03-27 14:35:38 -07001530 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001531 reinterpret_cast<size_t>(obj->Get()),
1532 0U,
1533 StubTest::GetEntrypoint(self, kQuickGetShortInstance),
1534 self,
1535 referrer);
1536 EXPECT_EQ(res, static_cast<int16_t>(res2));
1537 }
1538#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001539 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001540 LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
1541 // Force-print to std::cout so it's also outside the logcat.
1542 std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1543#endif
1544}
1545
Mathieu Chartierc7853442015-03-27 14:35:38 -07001546static void GetSet32Static(ArtField* f, Thread* self, mirror::ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001547 StubTest* test)
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001548 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogersc3ccc102014-06-25 11:52:14 -07001549#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001550 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001551
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001552 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001553 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001554 static_cast<size_t>(values[i]),
1555 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001556 StubTest::GetEntrypoint(self, kQuickSet32Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001557 self,
1558 referrer);
1559
Mathieu Chartierc7853442015-03-27 14:35:38 -07001560 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001561 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001562 StubTest::GetEntrypoint(self, kQuickGet32Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001563 self,
1564 referrer);
1565
1566 EXPECT_EQ(res, values[i]) << "Iteration " << i;
1567 }
1568#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001569 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001570 LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
1571 // Force-print to std::cout so it's also outside the logcat.
1572 std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
1573#endif
1574}
1575
1576
Mathieu Chartierc7853442015-03-27 14:35:38 -07001577static void GetSet32Instance(Handle<mirror::Object>* obj, ArtField* f,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001578 Thread* self, mirror::ArtMethod* referrer, StubTest* test)
1579 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogersc3ccc102014-06-25 11:52:14 -07001580#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001581 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001582
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001583 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001584 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001585 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001586 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001587 StubTest::GetEntrypoint(self, kQuickSet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001588 self,
1589 referrer);
1590
Mathieu Chartierc7853442015-03-27 14:35:38 -07001591 int32_t res = f->GetInt(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001592 EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;
1593
1594 res++;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001595 f->SetInt<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001596
Mathieu Chartierc7853442015-03-27 14:35:38 -07001597 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001598 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001599 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001600 StubTest::GetEntrypoint(self, kQuickGet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001601 self,
1602 referrer);
1603 EXPECT_EQ(res, static_cast<int32_t>(res2));
1604 }
1605#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001606 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001607 LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
1608 // Force-print to std::cout so it's also outside the logcat.
1609 std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1610#endif
1611}
1612
1613
Ian Rogersc3ccc102014-06-25 11:52:14 -07001614#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001615
1616static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
1617 mirror::ArtMethod* referrer, StubTest* test)
1618 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1619 test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1620 reinterpret_cast<size_t>(val),
1621 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001622 StubTest::GetEntrypoint(self, kQuickSetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001623 self,
1624 referrer);
1625
1626 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1627 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001628 StubTest::GetEntrypoint(self, kQuickGetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001629 self,
1630 referrer);
1631
1632 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1633}
1634#endif
1635
Mathieu Chartierc7853442015-03-27 14:35:38 -07001636static void GetSetObjStatic(ArtField* f, Thread* self, mirror::ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001637 StubTest* test)
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001638 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogersc3ccc102014-06-25 11:52:14 -07001639#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierc7853442015-03-27 14:35:38 -07001640 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001641
1642 // Allocate a string object for simplicity.
1643 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartierc7853442015-03-27 14:35:38 -07001644 set_and_check_static(f->GetDexFieldIndex(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001645
Mathieu Chartierc7853442015-03-27 14:35:38 -07001646 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001647#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001648 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001649 LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
1650 // Force-print to std::cout so it's also outside the logcat.
1651 std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
1652#endif
1653}
1654
1655
Ian Rogersc3ccc102014-06-25 11:52:14 -07001656#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierc7853442015-03-27 14:35:38 -07001657static void set_and_check_instance(ArtField* f, mirror::Object* trg,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001658 mirror::Object* val, Thread* self, mirror::ArtMethod* referrer,
1659 StubTest* test)
1660 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001661 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001662 reinterpret_cast<size_t>(trg),
1663 reinterpret_cast<size_t>(val),
Andreas Gampe29b38412014-08-13 00:15:43 -07001664 StubTest::GetEntrypoint(self, kQuickSetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001665 self,
1666 referrer);
1667
Mathieu Chartierc7853442015-03-27 14:35:38 -07001668 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001669 reinterpret_cast<size_t>(trg),
1670 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001671 StubTest::GetEntrypoint(self, kQuickGetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001672 self,
1673 referrer);
1674
1675 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1676
Mathieu Chartierc7853442015-03-27 14:35:38 -07001677 EXPECT_EQ(val, f->GetObj(trg));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001678}
1679#endif
1680
Mathieu Chartierc7853442015-03-27 14:35:38 -07001681static void GetSetObjInstance(Handle<mirror::Object>* obj, ArtField* f,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001682 Thread* self, mirror::ArtMethod* referrer, StubTest* test)
1683 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogersc3ccc102014-06-25 11:52:14 -07001684#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001685 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001686
1687 // Allocate a string object for simplicity.
1688 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001689 set_and_check_instance(f, obj->Get(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001690
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001691 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001692#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001693 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001694 LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
1695 // Force-print to std::cout so it's also outside the logcat.
1696 std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
1697#endif
1698}
1699
1700
1701// TODO: Complete these tests for 32b architectures.
1702
Mathieu Chartierc7853442015-03-27 14:35:38 -07001703static void GetSet64Static(ArtField* f, Thread* self, mirror::ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001704 StubTest* test)
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001705 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogersc3ccc102014-06-25 11:52:14 -07001706#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001707 uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001708
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001709 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001710 test->Invoke3UWithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001711 values[i],
Andreas Gampe29b38412014-08-13 00:15:43 -07001712 StubTest::GetEntrypoint(self, kQuickSet64Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001713 self,
1714 referrer);
1715
Mathieu Chartierc7853442015-03-27 14:35:38 -07001716 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001717 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001718 StubTest::GetEntrypoint(self, kQuickGet64Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001719 self,
1720 referrer);
1721
1722 EXPECT_EQ(res, values[i]) << "Iteration " << i;
1723 }
1724#else
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001725 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001726 LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
1727 // Force-print to std::cout so it's also outside the logcat.
1728 std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
1729#endif
1730}
1731
1732
Mathieu Chartierc7853442015-03-27 14:35:38 -07001733static void GetSet64Instance(Handle<mirror::Object>* obj, ArtField* f,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001734 Thread* self, mirror::ArtMethod* referrer, StubTest* test)
1735 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogersc3ccc102014-06-25 11:52:14 -07001736#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001737 uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001738
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001739 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001740 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001741 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001742 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001743 StubTest::GetEntrypoint(self, kQuickSet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001744 self,
1745 referrer);
1746
Mathieu Chartierc7853442015-03-27 14:35:38 -07001747 int64_t res = f->GetLong(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001748 EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;
1749
1750 res++;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001751 f->SetLong<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001752
Mathieu Chartierc7853442015-03-27 14:35:38 -07001753 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001754 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001755 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001756 StubTest::GetEntrypoint(self, kQuickGet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001757 self,
1758 referrer);
1759 EXPECT_EQ(res, static_cast<int64_t>(res2));
1760 }
1761#else
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001762 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001763 LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
1764 // Force-print to std::cout so it's also outside the logcat.
1765 std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1766#endif
1767}
1768
1769static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
1770 // garbage is created during ClassLinker::Init
1771
1772 JNIEnv* env = Thread::Current()->GetJniEnv();
1773 jclass jc = env->FindClass("AllFields");
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001774 CHECK(jc != nullptr);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001775 jobject o = env->AllocObject(jc);
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001776 CHECK(o != nullptr);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001777
1778 ScopedObjectAccess soa(self);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001779 StackHandleScope<4> hs(self);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001780 Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
1781 Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001782 // Need a method as a referrer
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001783 Handle<mirror::ArtMethod> m(hs.NewHandle(c->GetDirectMethod(0)));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001784
1785 // Play with it...
1786
1787 // Static fields.
Mathieu Chartierc7853442015-03-27 14:35:38 -07001788 ArtField* fields = c->GetSFields();
1789 size_t num_fields = c->NumStaticFields();
1790 for (size_t i = 0; i < num_fields; ++i) {
1791 ArtField* f = &fields[i];
1792 Primitive::Type type = f->GetTypeAsPrimitiveType();
1793 if (test_type != type) {
1794 continue;
1795 }
1796 switch (type) {
1797 case Primitive::Type::kPrimBoolean:
1798 GetSetBooleanStatic(f, self, m.Get(), test);
1799 break;
1800 case Primitive::Type::kPrimByte:
1801 GetSetByteStatic(f, self, m.Get(), test);
1802 break;
1803 case Primitive::Type::kPrimChar:
1804 GetSetCharStatic(f, self, m.Get(), test);
1805 break;
1806 case Primitive::Type::kPrimShort:
1807 GetSetShortStatic(f, self, m.Get(), test);
1808 break;
1809 case Primitive::Type::kPrimInt:
1810 GetSet32Static(f, self, m.Get(), test);
1811 break;
1812 case Primitive::Type::kPrimLong:
1813 GetSet64Static(f, self, m.Get(), test);
1814 break;
1815 case Primitive::Type::kPrimNot:
1816 // Don't try array.
1817 if (f->GetTypeDescriptor()[0] != '[') {
1818 GetSetObjStatic(f, self, m.Get(), test);
1819 }
1820 break;
1821 default:
1822 break; // Skip.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001823 }
1824 }
1825
1826 // Instance fields.
Mathieu Chartierc7853442015-03-27 14:35:38 -07001827 fields = c->GetIFields();
1828 num_fields = c->NumInstanceFields();
1829 for (size_t i = 0; i < num_fields; ++i) {
1830 ArtField* f = &fields[i];
1831 Primitive::Type type = f->GetTypeAsPrimitiveType();
1832 if (test_type != type) {
1833 continue;
1834 }
1835 switch (type) {
1836 case Primitive::Type::kPrimBoolean:
1837 GetSetBooleanInstance(&obj, f, self, m.Get(), test);
1838 break;
1839 case Primitive::Type::kPrimByte:
1840 GetSetByteInstance(&obj, f, self, m.Get(), test);
1841 break;
1842 case Primitive::Type::kPrimChar:
1843 GetSetCharInstance(&obj, f, self, m.Get(), test);
1844 break;
1845 case Primitive::Type::kPrimShort:
1846 GetSetShortInstance(&obj, f, self, m.Get(), test);
1847 break;
1848 case Primitive::Type::kPrimInt:
1849 GetSet32Instance(&obj, f, self, m.Get(), test);
1850 break;
1851 case Primitive::Type::kPrimLong:
1852 GetSet64Instance(&obj, f, self, m.Get(), test);
1853 break;
1854 case Primitive::Type::kPrimNot:
1855 // Don't try array.
1856 if (f->GetTypeDescriptor()[0] != '[') {
1857 GetSetObjInstance(&obj, f, self, m.Get(), test);
1858 }
1859 break;
1860 default:
1861 break; // Skip.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001862 }
1863 }
1864
1865 // TODO: Deallocate things.
1866}
1867
Fred Shih37f05ef2014-07-16 18:38:08 -07001868TEST_F(StubTest, Fields8) {
1869 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1870
1871 Thread* self = Thread::Current();
1872
1873 self->TransitionFromSuspendedToRunnable();
1874 LoadDex("AllFields");
1875 bool started = runtime_->Start();
1876 CHECK(started);
1877
1878 TestFields(self, this, Primitive::Type::kPrimBoolean);
1879 TestFields(self, this, Primitive::Type::kPrimByte);
1880}
1881
1882TEST_F(StubTest, Fields16) {
1883 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1884
1885 Thread* self = Thread::Current();
1886
1887 self->TransitionFromSuspendedToRunnable();
1888 LoadDex("AllFields");
1889 bool started = runtime_->Start();
1890 CHECK(started);
1891
1892 TestFields(self, this, Primitive::Type::kPrimChar);
1893 TestFields(self, this, Primitive::Type::kPrimShort);
1894}
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001895
1896TEST_F(StubTest, Fields32) {
1897 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1898
1899 Thread* self = Thread::Current();
1900
1901 self->TransitionFromSuspendedToRunnable();
1902 LoadDex("AllFields");
1903 bool started = runtime_->Start();
1904 CHECK(started);
1905
1906 TestFields(self, this, Primitive::Type::kPrimInt);
1907}
1908
1909TEST_F(StubTest, FieldsObj) {
1910 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1911
1912 Thread* self = Thread::Current();
1913
1914 self->TransitionFromSuspendedToRunnable();
1915 LoadDex("AllFields");
1916 bool started = runtime_->Start();
1917 CHECK(started);
1918
1919 TestFields(self, this, Primitive::Type::kPrimNot);
1920}
1921
1922TEST_F(StubTest, Fields64) {
1923 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1924
1925 Thread* self = Thread::Current();
1926
1927 self->TransitionFromSuspendedToRunnable();
1928 LoadDex("AllFields");
1929 bool started = runtime_->Start();
1930 CHECK(started);
1931
1932 TestFields(self, this, Primitive::Type::kPrimLong);
1933}
1934
// Exercises interface-method dispatch through the quick stubs:
//  1. the IMT conflict trampoline, passing the interface method's dex index
//     as the hidden argument, and
//  2. the invoke-interface-with-access-check trampoline.
// Uses ArrayList/List.contains() so the result (JNI_TRUE/JNI_FALSE) flips
// depending on whether the probe object was added to the list.
TEST_F(StubTest, IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod = env->GetMethodID(arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get mirror representation.
  Handle<mirror::ArtMethod> contains_amethod(hs.NewHandle(soa.DecodeMethod(contains_jmethod)));

  // Patch up ArrayList.contains: if it has no compiled code yet, route it
  // through the quick-to-interpreter bridge so the invocation below works.
  if (contains_amethod.Get()->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod.Get()->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
  }

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod = env->GetMethodID(list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get mirror representation.
  Handle<mirror::ArtMethod> inf_contains(hs.NewHandle(soa.DecodeMethod(inf_contains_jmethod)));

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Create instances.

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object*>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(jobj)));

  // Invocation tests.

  // 1. imt_conflict

  // Contains. The object has not been added yet, so expect JNI_FALSE.
  // Args: (method = 0, receiver = array_list, arg = obj); the hidden argument
  // is the interface method's dex method index.

  size_t result =
      Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self, contains_amethod.Get(),
                                   static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());

  // Contains. Same invocation as before, but now the object is in the list,
  // so expect JNI_TRUE.

  result = Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                        reinterpret_cast<size_t>(obj.Get()),
                                        StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                        self, contains_amethod.Get(),
                                        static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // 2. regular interface trampoline

  // contains(obj) -> the object was added above, expect JNI_TRUE.
  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(obj.Get()),
                               StubTest::GetEntrypoint(self,
                                                       kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod.Get());

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // contains(array_list) -> the list does not contain itself, expect JNI_FALSE.
  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               StubTest::GetEntrypoint(self,
                                                       kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod.Get());

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
2055
// Exercises the kQuickIndexOf entrypoint (String.indexOf(char, int)) against
// mirror::String::FastIndexOf as the reference implementation, sweeping a
// matrix of strings, characters, and start offsets (including the
// out-of-range starts -1 and beyond-the-end).
TEST_F(StubTest, StringIndexOf) {
#if defined(__arm__) || defined(__aarch64__)
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  const char* c_str[] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
  static constexpr size_t kStringCount = arraysize(c_str);
  const char c_char[] = { 'a', 'b', 'c', 'd', 'e' };
  static constexpr size_t kCharCount = arraysize(c_char);

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
  }

  // Matrix of expectations. First component is first parameter. Note we only check against the
  // sign, not the value. As we are testing random offsets, we need to compute this and need to
  // rely on String::CompareTo being correct.
  static constexpr size_t kMaxLen = 9;
  DCHECK_LE(strlen(c_str[kStringCount-1]), kMaxLen) << "Please fix the indexof test.";

  // Last dimension: start, offset by 1 (so index 0 corresponds to start == -1).
  int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
      }
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        int32_t start = static_cast<int32_t>(z) - 1;

        // Test indexOf: string s[x], character c_char[y], from offset start.
        size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
                                StubTest::GetEntrypoint(self, kQuickIndexOf), self);

        EXPECT_FALSE(self->IsExceptionPending());

        // The result is a 32b signed integer; reinterpret the raw size_t
        // return through a union to recover it.
        union {
          size_t r;
          int32_t i;
        } conv;
        conv.r = result;

        EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
            c_char[y] << " @ " << start;
      }
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
2131
Andreas Gampe525cde22014-04-22 15:44:50 -07002132} // namespace art