/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <cstdio>

#include "common_runtime_test.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "mirror/art_field-inl.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/string-inl.h"
#include "scoped_thread_state_change.h"

namespace art {

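// Test harness that calls individual quick entrypoint stubs directly, bypassing compiled code.
// The Invoke3* helpers below set up a minimal managed-stack transition and jump to the stub
// using the architecture-specific calling convention the trampolines expect.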
class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(type), type);
        }
      }
    }
  }

  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

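  // Calls 'code' with three word-sized arguments through a small per-architecture inline-assembly
  // trampoline. The trampoline spills registers the compiler expects to be preserved, stores
  // 'referrer' on the stack where a quick frame keeps the calling method, loads 'self' into the
  // thread register on ARM/ARM64, and on ARM64 additionally checks that the callee preserved
  // d8-d15.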
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             mirror::ArtMethod* referrer) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "subl $12, %%esp\n\t"       // Align stack.
        "pushl %[referrer]\n\t"     // Store referrer.
        "call *%%edi\n\t"           // Call the stub
        "addl $16, %%esp"           // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #20\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "add sp, sp, #20\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop nullptr and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"    // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "str %[self], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldr x18, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"


        "blr x3\n\t"              // Call the stub
        "mov x8, x0\n\t"          // Store result
        "add sp, sp, #16\n\t"     // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"          // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"    // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"     // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t"  // Store the FPR comparison result
        "mov %[result], x8\n\t"      // Store the call result

        "b 3f\n\t"                // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"              // Call the stub
        "addq $16, %%rsp\n\t"          // Pop nullptr and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "m"(referrer)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbx", "rcx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

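  // Same as Invoke3WithReferrer, but additionally materializes a 'hidden' argument outside the
  // normal argument registers, as expected by stubs that take one: here it is placed in xmm0 on
  // x86/x86-64, r12 on ARM, and x12 on ARM64.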
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, mirror::ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "movd %[hidden], %%xmm0\n\t"
        "subl $12, %%esp\n\t"       // Align stack.
        "pushl %[referrer]\n\t"     // Store referrer
        "call *%%edi\n\t"           // Call the stub
        "addl $16, %%esp"           // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"m"(referrer), [hidden]"r"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop nullptr and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"    // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x18, x12, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"              // Call the stub
        "mov x8, x0\n\t"          // Store result
        "add sp, sp, #16\n\t"     // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"          // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"    // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"     // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t"  // Store the FPR comparison result
        "mov %[result], x8\n\t"      // Store the call result

        "b 3f\n\t"                // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "movq %[hidden], %%r9\n\t"     // No need to save r9, listed as clobbered
        "movd %%r9, %%xmm0\n\t"
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"              // Call the stub
        "addq $16, %%rsp\n\t"          // Pop nullptr and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "m"(referrer), [hidden] "m"(hidden)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbx", "rcx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

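  // Helpers for stubs taking a 64-bit argument: 64-bit targets pass the value straight through,
  // 32-bit targets split it into two 32-bit register arguments (low word first) where possible.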
  // Method with 32b arg0, 64b arg1
  size_t Invoke3UWithReferrer(size_t arg0, uint64_t arg1, uintptr_t code, Thread* self,
                              mirror::ArtMethod* referrer) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, 0U, code, self, referrer);
#else
    // Need to split up arguments.
    uint32_t lower = static_cast<uint32_t>(arg1 & 0xFFFFFFFF);
    uint32_t upper = static_cast<uint32_t>((arg1 >> 32) & 0xFFFFFFFF);

    return Invoke3WithReferrer(arg0, lower, upper, code, self, referrer);
#endif
  }

  // Method with 32b arg0, 32b arg1, 64b arg2
  size_t Invoke3UUWithReferrer(uint32_t arg0, uint32_t arg1, uint64_t arg2, uintptr_t code,
                               Thread* self, mirror::ArtMethod* referrer) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, referrer);
#else
    // TODO: Needs 4-param invoke.
    return 0;
#endif
  }

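  // Reads the raw code pointer for 'entrypoint' out of the current thread's quick entrypoint
  // table, using the 4- or 8-byte thread offsets depending on the target word size.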
  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
    int32_t offset;
#ifdef __LP64__
    offset = GetThreadOffset<8>(entrypoint).Int32Value();
#else
    offset = GetThreadOffset<4>(entrypoint).Int32Value();
#endif
    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
  }

 protected:
  size_t fp_result;
};

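// Each test below drives one quick entrypoint through the trampolines above. Tests only run on
// the architectures those trampolines support; elsewhere they log and print a skip message.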
TEST_F(StubTest, Memcpy) {
#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  uint32_t orig[20];
  uint32_t trg[20];
  for (size_t i = 0; i < 20; ++i) {
    orig[i] = i;
    trg[i] = 0;
  }

  Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
          10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);

  EXPECT_EQ(orig[0], trg[0]);

  for (size_t i = 1; i < 4; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  for (size_t i = 4; i < 14; ++i) {
    EXPECT_EQ(orig[i], trg[i]);
  }

  for (size_t i = 14; i < 20; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  // TODO: Test overlapping?

#else
  LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, LockObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();

  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
  EXPECT_EQ(lock_after.ThinLockCount(), 0U);  // Thin lock starts count at zero

  for (size_t i = 1; i < kThinLockLoops; ++i) {
    Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

    // Check we're at lock count i

    LockWord l_inc = obj->GetLockWord(false);
    LockWord::LockState l_inc_state = l_inc.GetState();
    EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
    EXPECT_EQ(l_inc.ThinLockCount(), i);
  }

  // Force a fat lock by running identity hashcode to fill up lock word.
  Handle<mirror::String> obj2(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  obj2->IdentityHashCode();

  Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after2 = obj2->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
  EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));

  // Test done.
#else
  LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

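// Tiny deterministic pseudo-random generator (a Lehmer-style recurrence with an added constant)
// so the lock/unlock stress test below is reproducible across runs.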
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  uint32_t next() {
    val_ = val_ * 48271 % 2147483647 + 13;
    return val_;
  }

  uint32_t val_;
};

// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();

  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
  const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init
  static constexpr size_t kNumberOfLocks = 10;  // Number of objects = lock
  StackHandleScope<kNumberOfLocks + 1> hs(self);
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
  // This should be an illegal monitor state.
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after2 = obj->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);

  LockWord lock_after3 = obj->GetLockWord(false);
  LockWord::LockState new_state3 = lock_after3.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);

  // Stress test:
  // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
  // each step.

  RandGen r(0x1234);

  constexpr size_t kIterations = 10000;  // Number of iterations
  constexpr size_t kMoveToFat = 1000;    // Chance of 1:kMoveFat to make a lock fat.

  size_t counts[kNumberOfLocks];
  bool fat[kNumberOfLocks];  // Whether a lock should be thin or fat.
  Handle<mirror::String> objects[kNumberOfLocks];

  // Initialize = allocate.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    counts[i] = 0;
    fat[i] = false;
    objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
  }

  for (size_t i = 0; i < kIterations; ++i) {
    // Select which lock to update.
    size_t index = r.next() % kNumberOfLocks;

    // Make lock fat?
    if (!fat[index] && (r.next() % kMoveToFat == 0)) {
      fat[index] = true;
      objects[index]->IdentityHashCode();

      LockWord lock_iter = objects[index]->GetLockWord(false);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (counts[index] == 0) {
        EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
      } else {
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
      }
    } else {
      bool lock;  // Whether to lock or unlock in this step.
      if (counts[index] == 0) {
        lock = true;
      } else if (counts[index] == kThinLockLoops) {
        lock = false;
      } else {
        // Randomly.
        lock = r.next() % 2 == 0;
      }

      if (lock) {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
                      self);
        counts[index]++;
      } else {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      art_quick_unlock_object, self);
        counts[index]--;
      }

      EXPECT_FALSE(self->IsExceptionPending());

      // Check the new state.
      LockWord lock_iter = objects[index]->GetLockWord(true);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (fat[index]) {
        // Abuse MonitorInfo.
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
        MonitorInfo info(objects[index].Get());
        EXPECT_EQ(counts[index], info.entry_count_) << index;
      } else {
        if (counts[index] > 0) {
          EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
          EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
        } else {
          EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
        }
      }
    }
  }

  // Unlock the remaining count times and then check it's unlocked. Then deallocate.
  // Go reverse order to correctly handle Handles.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    size_t index = kNumberOfLocks - 1 - i;
    size_t count = counts[index];
    while (count > 0) {
      test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
                    self);
      count--;
    }

    LockWord lock_after4 = objects[index]->GetLockWord(false);
    LockWord::LockState new_state4 = lock_after4.GetState();
    EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
                || LockWord::LockState::kFatLocked == new_state4);
  }

  // Test done.
#else
  LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, UnlockObject) {
  TestUnlockObject(this);
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_check_cast(void);
#endif

TEST_F(StubTest, CheckCast) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  const uintptr_t art_quick_check_cast = StubTest::GetEntrypoint(self, kQuickCheckCast);

  // Find some classes.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
  Handle<mirror::Class> c2(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  // TODO: Make the following work. But that would require correct managed frames.

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

#else
  LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


TEST_F(StubTest, APutObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  // Do not check non-checked ones, we'd need handlers and stuff...
  const uintptr_t art_quick_aput_obj_with_null_and_bound_check =
      StubTest::GetEntrypoint(self, kQuickAputObjectWithNullAndBoundCheck);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<5> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
  Handle<mirror::Class> ca(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  // Build a string array of size 10
  Handle<mirror::ObjectArray<mirror::Object>> array(
      hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));

  // Build a string -> should be assignable
  Handle<mirror::String> str_obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  // Build a generic object -> should fail assigning
  Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));

  // Play with it...

  // 1) Success cases
  // 1.1) Assign str_obj to array[0..3]

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(3));

  // 1.2) Assign null to array[0..3]

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(3));

  // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.

  // 2) Failure cases (str into str[])
  // 2.1) Array = null
  // TODO: Throwing NPE needs actual DEX code

//  Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
//          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
//
//  EXPECT_TRUE(self->IsExceptionPending());
//  self->ClearException();

  // 2.2) Index < 0

  Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
          reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 2.3) Index > 0

  Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 3) Failure cases (obj into str[])

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // Tests done.
#else
  LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, AllocObject) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            reinterpret_cast<size_t>(c->GetVirtualMethod(0)),  // arbitrary
                            0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObject),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K
    size_t length = 128 * KB / 4;
    while (length > 10) {
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h.Get() == nullptr) {
        self->ClearException();

        // Try a smaller length
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h.Get() != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001127TEST_F(StubTest, AllocObjectArray) {
1128 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1129
Ian Rogersc3ccc102014-06-25 11:52:14 -07001130#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001131 // TODO: Check the "Unresolved" allocation stubs
1132
1133 Thread* self = Thread::Current();
1134 // Create an object
1135 ScopedObjectAccess soa(self);
1136 // garbage is created during ClassLinker::Init
1137
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001138 StackHandleScope<2> hs(self);
1139 Handle<mirror::Class> c(
1140 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001141
1142 // Needed to have a linked method.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001143 Handle<mirror::Class> c_obj(
1144 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001145
1146 // Play with it...
1147
1148 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001149
1150 // For some reason this does not work, as the type_idx is artificial and outside what the
1151 // resolved types of c_obj allow...
1152
  if (false) {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0)),  // arbitrary
                            10U,
                            StubTest::GetEntrypoint(self, kQuickAllocArray),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    EXPECT_EQ(obj->GetLength(), 10);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 10U,
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory.
  {
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr),
                            GB,  // that should fail...
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


TEST_F(StubTest, StringCompareTo) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))

  Thread* self = Thread::Current();

  const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);

  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  static constexpr size_t kBaseStringCount = 8;
  const char* c[kBaseStringCount] = { "", "", "a", "aa", "ab",
      "aacaacaacaacaacaac",  // This one's under the default limit to go to __memcmp16.
      "aacaacaacaacaacaacaacaacaacaacaacaac",  // This one's over.
      "aacaacaacaacaacaacaacaacaacaacaacaaca" };  // As is this one. We need a separate one to
                                                  // defeat object-equal optimizations.
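  // Note: the two empty strings are distinct objects with equal contents, and the two longest
  // strings share a long common prefix, so comparisons cannot be short-circuited by an
  // object-identity check; the lengths around the __memcmp16 threshold cover both compare paths.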

  static constexpr size_t kStringCount = 2 * kBaseStringCount;

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kBaseStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
  }

  RandGen r(0x1234);

  for (size_t i = kBaseStringCount; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i - kBaseStringCount]));
    int32_t length = s[i]->GetLength();
    if (length > 1) {
      // Set a random offset and length.
      int32_t new_offset = 1 + (r.next() % (length - 1));
      int32_t rest = length - new_offset - 1;
      int32_t new_length = 1 + (rest > 0 ? r.next() % rest : 0);

      s[i]->SetField32<false>(mirror::String::CountOffset(), new_length);
      s[i]->SetField32<false>(mirror::String::OffsetOffset(), new_offset);
    }
  }
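  // The second-half strings longer than one character now refer to a sub-range of their backing
  // character array (count and offset were poked directly), so the stub is also exercised on
  // strings whose data does not start at offset 0.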

  // TODO: wide characters

  // Matrix of expectations. First component is first parameter. Note we only check against the
  // sign, not the value. As we are testing random offsets, we need to compute this and need to
  // rely on String::CompareTo being correct.
  int32_t expected[kStringCount][kStringCount];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      expected[x][y] = s[x]->CompareTo(s[y].Get());
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      // Test string_compareto x y
      size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
                              reinterpret_cast<size_t>(s[y].Get()), 0U,
                              art_quick_string_compareto, self);

      EXPECT_FALSE(self->IsExceptionPending());

      // The result is a 32b signed integer
      union {
        size_t r;
        int32_t i;
      } conv;
      conv.r = result;
      int32_t e = expected[x][y];
      EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
      std::endl;
#endif
}


static void GetSet32Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                           mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  constexpr size_t num_values = 7;
  uint32_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
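  // The values span zero, small numbers, 2^15, 10^6 and 0xFFFFFFFF, which helps catch truncation
  // or unintended sign extension on the value's way through the stubs in the checks below.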

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet32Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet32Static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


static void GetSet32Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  constexpr size_t num_values = 7;
  uint32_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

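  // Each iteration writes through the quick stub and reads the field back through
  // mirror::ArtField, then stores an incremented value through mirror::ArtField and reads it
  // back through the stub, so the two access paths cross-check each other.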
  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet32Instance),
                              self,
                              referrer);

    int32_t res = f->Get()->GetInt(obj->Get());
    EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;

    res++;
    f->Get()->SetInt<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGet32Instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int32_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))

static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
                                 mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                            reinterpret_cast<size_t>(val),
                            0U,
                            StubTest::GetEntrypoint(self, kQuickSetObjStatic),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                                         0U, 0U,
                                         StubTest::GetEntrypoint(self, kQuickGetObjStatic),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
}
#endif

static void GetSetObjStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                            mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_static((*f)->GetDexFieldIndex(), str, self, referrer, test);

  set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);
#else
  LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
static void set_and_check_instance(Handle<mirror::ArtField>* f, mirror::Object* trg,
                                   mirror::Object* val, Thread* self, mirror::ArtMethod* referrer,
                                   StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                            reinterpret_cast<size_t>(trg),
                            reinterpret_cast<size_t>(val),
                            StubTest::GetEntrypoint(self, kQuickSetObjInstance),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                         reinterpret_cast<size_t>(trg),
                                         0U,
                                         StubTest::GetEntrypoint(self, kQuickGetObjInstance),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;

  EXPECT_EQ(val, f->Get()->GetObj(trg));
}
#endif

static void GetSetObjInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                              Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_instance(f, obj->Get(), str, self, referrer, test);

  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
#else
  LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


// TODO: Complete these tests for 32b architectures.
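// (The 64-bit helpers below are currently restricted to 64-bit ISAs, presumably because there a
// 64-bit value fits into a single size_t argument and return value; 32-bit targets would need the
// value split across register pairs.)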

static void GetSet64Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                           mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
  constexpr size_t num_values = 8;
  uint64_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
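  // 0xFFFFFFFFFFFF does not fit in 32 bits, so a stub that silently truncated the value would be
  // caught by the comparison below.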

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3UWithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                               values[i],
                               StubTest::GetEntrypoint(self, kQuickSet64Static),
                               self,
                               referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet64Static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


static void GetSet64Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
  constexpr size_t num_values = 8;
  uint64_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet64Instance),
                              self,
                              referrer);

    int64_t res = f->Get()->GetLong(obj->Get());
    EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;

    res++;
    f->Get()->SetLong<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGet64Instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int64_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
  // garbage is created during ClassLinker::Init

  JNIEnv* env = Thread::Current()->GetJniEnv();
  jclass jc = env->FindClass("AllFields");
  CHECK(jc != nullptr);
  jobject o = env->AllocObject(jc);
  CHECK(o != nullptr);
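  // AllFields is one of the test dex classes loaded by the harness; it is expected to declare
  // static and instance fields covering each of the types handled in the switches below.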

  ScopedObjectAccess soa(self);
  StackHandleScope<5> hs(self);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
  Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
  // Need a method as a referrer
  Handle<mirror::ArtMethod> m(hs.NewHandle(c->GetDirectMethod(0)));
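  // (The get/set stubs take the field's dex index plus a referrer method through which the field
  // is resolved, so an arbitrary method of the same class serves as that referrer.)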

  // Play with it...

  // Static fields.
  {
    Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetSFields()));
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      StackHandleScope<1> hs(self);
      Handle<mirror::ArtField> f(hs.NewHandle(fields->Get(i)));

      Primitive::Type type = f->GetTypeAsPrimitiveType();
      switch (type) {
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Static(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Static(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Don't try array.
          if (test_type == type && f->GetTypeDescriptor()[0] != '[') {
            GetSetObjStatic(&obj, &f, self, m.Get(), test);
          }
          break;

        default:
          break;  // Skip.
      }
    }
  }

  // Instance fields.
  {
    Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetIFields()));
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      StackHandleScope<1> hs(self);
      Handle<mirror::ArtField> f(hs.NewHandle(fields->Get(i)));

      Primitive::Type type = f->GetTypeAsPrimitiveType();
      switch (type) {
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Instance(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Instance(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Don't try array.
          if (test_type == type && f->GetTypeDescriptor()[0] != '[') {
            GetSetObjInstance(&obj, &f, self, m.Get(), test);
          }
          break;

        default:
          break;  // Skip.
      }
    }
  }

  // TODO: Deallocate things.
}


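// The Fields* tests below run TestFields once per payload kind. They start the runtime and load
// the AllFields dex file first so the class and its fields can be resolved.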
TEST_F(StubTest, Fields32) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimInt);
}

TEST_F(StubTest, FieldsObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimNot);
}

TEST_F(StubTest, Fields64) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimLong);
}


TEST_F(StubTest, IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod = env->GetMethodID(arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get mirror representation.
  Handle<mirror::ArtMethod> contains_amethod(hs.NewHandle(soa.DecodeMethod(contains_jmethod)));

  // Patch up ArrayList.contains.
  if (contains_amethod.Get()->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod.Get()->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
  }
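  // (When contains has no compiled code, the quick-to-interpreter bridge gives it a valid quick
  // entry point, so the IMT dispatch exercised below has a real target to branch to.)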

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod = env->GetMethodID(list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get mirror representation.
  Handle<mirror::ArtMethod> inf_contains(hs.NewHandle(soa.DecodeMethod(inf_contains_jmethod)));

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Sanity check: check that there is a conflict for List.contains in ArrayList.

  mirror::Class* arraylist_class = soa.Decode<mirror::Class*>(arraylist_jclass);
  mirror::ArtMethod* m = arraylist_class->GetEmbeddedImTableEntry(
      inf_contains->GetDexMethodIndex() % mirror::Class::kImtSize);
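  // Interface calls dispatch through a fixed-size interface method table embedded in the class:
  // the slot is the interface method's dex method index modulo kImtSize. When several interface
  // methods map to the same slot, the slot holds the IMT conflict method, and calls go through
  // the conflict trampoline that this test exercises.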
Andreas Gampe51f76352014-05-21 08:28:48 -07001717
Andreas Gampe0ea37942014-05-21 14:12:18 -07001718 if (!m->IsImtConflictMethod()) {
1719 LOG(WARNING) << "Test is meaningless, no IMT conflict in setup: " <<
1720 PrettyMethod(m, true);
1721 LOG(WARNING) << "Please update StubTest.IMT.";
1722 return;
1723 }
Andreas Gampe51f76352014-05-21 08:28:48 -07001724
1725 // Create instances.
1726
1727 jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
1728 ASSERT_NE(nullptr, jarray_list);
1729 Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object*>(jarray_list)));
1730
1731 jobject jobj = env->NewObject(obj_jclass, obj_constructor);
1732 ASSERT_NE(nullptr, jobj);
1733 Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(jobj)));
1734
1735 // Invoke.
1736
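  // Call List.contains through the conflict trampoline. The interface method's dex method index
  // is passed as the hidden argument, which the trampoline uses (together with the receiver) to
  // find the ArrayList implementation; the list is still empty, so this should return JNI_FALSE.
  // (Roughly the equivalent of calling ((java.util.List) list).contains(obj) through the IMT.)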
  size_t result =
      Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self, contains_amethod.Get(),
                                   static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));

  // Invoke again.

  result = Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                        reinterpret_cast<size_t>(obj.Get()),
                                        StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                        self, contains_amethod.Get(),
                                        static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, StringIndexOf) {
#if defined(__arm__) || defined(__aarch64__)
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings and characters to search for.
  // Use arrays so we can index into them and use a matrix for expected results.
  // TODO: Shared backing arrays.
  static constexpr size_t kStringCount = 7;
  const char* c_str[kStringCount] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
  static constexpr size_t kCharCount = 5;
  const char c_char[kCharCount] = { 'a', 'b', 'c', 'd', 'e' };

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
  }

  // Matrix of expectations. First component is the string, second the character, third the start
  // index. Unlike the compareto test we check exact values here; they are computed with
  // String::FastIndexOf, so we rely on that being correct.
  static constexpr size_t kMaxLen = 9;
  DCHECK_LE(strlen(c_str[kStringCount-1]), kMaxLen) << "Please fix the indexof test.";

  // Last dimension: start, offset by 1.
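  // Index z corresponds to start position z - 1, so the tested start positions run from -1
  // (before the string) up to kMaxLen + 1 (past the end of the longest string).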
  int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
      }
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        int32_t start = static_cast<int32_t>(z) - 1;

        // Test indexof for string x, character y, at this start position.
        size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
                                StubTest::GetEntrypoint(self, kQuickIndexOf), self);

        EXPECT_FALSE(self->IsExceptionPending());

        // The result is a 32b signed integer
        union {
          size_t r;
          int32_t i;
        } conv;
        conv.r = result;

        EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
            c_char[y] << " @ " << start;
      }
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

}  // namespace art