/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <cstdio>

#include "common_runtime_test.h"
#include "mirror/art_field-inl.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/string-inl.h"
#include "scoped_thread_state_change.h"

namespace art {

class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods.
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(type), type);
        }
      }
    }
  }

  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    // Use a smaller heap.
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

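  // For illustration, the tests below drive an assembly entrypoint through this helper roughly
  // like so (see e.g. TEST_F(StubTest, LockObject) further down):
  //
  //   Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
  //           reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);
  //
  // Arguments and the result travel as raw machine words; the per-architecture assembly below
  // moves them into the registers the stub under test expects.
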
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             mirror::ArtMethod* referrer) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "subl $12, %%esp\n\t"       // Align stack.
        "pushl %[referrer]\n\t"     // Store referrer.
        "call *%%edi\n\t"           // Call the stub.
        "addl $16, %%esp"           // Pop referrer and padding.
        : "=a" (result)
          // Use the result from eax.
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx.
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B.
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned.
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #20\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "add sp, sp, #20\n\t"

        "blx r3\n\t"                // Call the stub.
        "add sp, sp, #12\n\t"       // Pop referrer, padding and saved r9.
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state.
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result.
        : [result] "=r" (result)
          // Use the result from r0.
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"       // Reserve stack space, 16B aligned.
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t" // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "str %[self], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15.
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldr x18, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"                // Call the stub.
        "mov x8, x0\n\t"            // Store result.
        "add sp, sp, #16\n\t"       // Drop the quick "frame".
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"            // Use x9 as flag, in clobber list.

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"      // Restore stuff not named clobbered, may contain fpr_result.
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"       // Free stack space, now sp as on entry.
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t" // Store the FPR comparison result.
        "mov %[result], x8\n\t"     // Store the call result.

        "b 3f\n\t"                  // Goto end.

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                  // Goto finish-up.

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0.
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__)
    // Note: Uses the native convention.
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"     // Push referrer.
        "pushq (%%rsp)\n\t"         // & 16B alignment padding.
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"           // Call the stub.
        "addq $16, %%rsp\n\t"       // Pop referrer and padding.
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax.
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "m"(referrer)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax.
        : "rbx", "rcx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

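  // This variant additionally threads a "hidden" argument into the stub. As the assembly below
  // shows, it is materialized in xmm0 on x86 and x86-64, in r12 on ARM, and in x12 on ARM64.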
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, mirror::ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "movd %[hidden], %%xmm0\n\t"
        "subl $12, %%esp\n\t"       // Align stack.
        "pushl %[referrer]\n\t"     // Store referrer.
        "call *%%edi\n\t"           // Call the stub.
        "addl $16, %%esp"           // Pop referrer and padding.
        : "=a" (result)
          // Use the result from eax.
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"m"(referrer), [hidden]"r"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx.
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B.
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned.
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                // Call the stub.
        "add sp, sp, #12\n\t"       // Pop referrer, padding and saved r9.
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state.
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result.
        : [result] "=r" (result)
          // Use the result from r0.
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"       // Reserve stack space, 16B aligned.
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t" // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15.
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x18, x12, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"                // Call the stub.
        "mov x8, x0\n\t"            // Store result.
        "add sp, sp, #16\n\t"       // Drop the quick "frame".
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"            // Use x9 as flag, in clobber list.

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"      // Restore stuff not named clobbered, may contain fpr_result.
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"       // Free stack space, now sp as on entry.
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t" // Store the FPR comparison result.
        "mov %[result], x8\n\t"     // Store the call result.

        "b 3f\n\t"                  // Goto end.

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                  // Goto finish-up.

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0.
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__)
    // Note: Uses the native convention.
    // TODO: Set the thread?
    __asm__ __volatile__(
        "movq %[hidden], %%r9\n\t"  // No need to save r9, listed as clobbered.
        "movd %%r9, %%xmm0\n\t"
        "pushq %[referrer]\n\t"     // Push referrer.
        "pushq (%%rsp)\n\t"         // & 16B alignment padding.
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"           // Call the stub.
        "addq $16, %%rsp\n\t"       // Pop referrer and padding.
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax.
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "m"(referrer), [hidden] "m"(hidden)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax.
        : "rbx", "rcx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Method with 32b arg0, 64b arg1.
  size_t Invoke3UWithReferrer(size_t arg0, uint64_t arg1, uintptr_t code, Thread* self,
                              mirror::ArtMethod* referrer) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, 0U, code, self, referrer);
#else
    // Need to split up arguments.
    uint32_t lower = static_cast<uint32_t>(arg1 & 0xFFFFFFFF);
    uint32_t upper = static_cast<uint32_t>((arg1 >> 32) & 0xFFFFFFFF);

    return Invoke3WithReferrer(arg0, lower, upper, code, self, referrer);
#endif
  }

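  // Worked example for the split above (illustrative values only): arg1 = 0x1122334455667788
  // reaches a 32-bit stub as lower = 0x55667788 (in the arg1 slot) and upper = 0x11223344 (in
  // the arg2 slot), while 64-bit targets receive the value unsplit in a single register.
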
  // Method with 32b arg0, 32b arg1, 64b arg2.
  size_t Invoke3UUWithReferrer(uint32_t arg0, uint32_t arg1, uint64_t arg2, uintptr_t code,
                               Thread* self, mirror::ArtMethod* referrer) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, referrer);
#else
    // TODO: Needs 4-param invoke.
    return 0;
#endif
  }

 protected:
  size_t fp_result;
};

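
// Each test below follows the same pattern: declare the stub entrypoint for the architectures
// that implement it, drive it through Invoke3() with hand-packed word arguments, and check the
// resulting runtime state. On unsupported architectures the test logs a message and skips.
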
#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_memcpy(void);
#endif

TEST_F(StubTest, Memcpy) {
#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  uint32_t orig[20];
  uint32_t trg[20];
  for (size_t i = 0; i < 20; ++i) {
    orig[i] = i;
    trg[i] = 0;
  }

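  // Copy the ten words orig[4..13] over trg[4..13]. The checks below verify both that the
  // copied window matches and that the guard words on either side kept their zero fill (i.e.
  // still differ from orig, whose elements are nonzero there).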
  Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
          10 * sizeof(uint32_t), reinterpret_cast<uintptr_t>(&art_quick_memcpy), self);

  EXPECT_EQ(orig[0], trg[0]);

  for (size_t i = 1; i < 4; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  for (size_t i = 4; i < 14; ++i) {
    EXPECT_EQ(orig[i], trg[i]);
  }

  for (size_t i = 14; i < 20; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  // TODO: Test overlapping?

#else
  LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_lock_object(void);
#endif

TEST_F(StubTest, LockObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();
  // Create an object.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
          reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
  EXPECT_EQ(lock_after.ThinLockCount(), 0U);  // Thin lock starts count at zero.

  for (size_t i = 1; i < kThinLockLoops; ++i) {
    Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
            reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

    // Check we're at lock count i.
    LockWord l_inc = obj->GetLockWord(false);
    LockWord::LockState l_inc_state = l_inc.GetState();
    EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
    EXPECT_EQ(l_inc.ThinLockCount(), i);
  }

  // Force a fat lock by running identity hashcode to fill up lock word.
  Handle<mirror::String> obj2(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  obj2->IdentityHashCode();

  Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U,
          reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after2 = obj2->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
  EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));

  // Test done.
#else
  LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

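// Lock word states exercised above: an unlocked word becomes a thin lock (owner and recursion
// count held inline); once the word is claimed by the identity hash code, a subsequent lock has
// to inflate into a full Monitor, i.e. kFatLocked.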

class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  uint32_t next() {
    val_ = val_ * 48271 % 2147483647 + 13;
    return val_;
  }

  uint32_t val_;
};

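// Background on RandGen: next() is a Lehmer-style linear congruential step (48271 is the
// "minimal standard" multiplier, 2147483647 is 2^31 - 1) with a small additive constant. All
// that matters here is a cheap, deterministic sequence, so the stress test is reproducible.
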
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_lock_object(void);
extern "C" void art_quick_unlock_object(void);
#endif

// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();
  // Create an object.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init
  static constexpr size_t kNumberOfLocks = 10;  // Number of objects = number of locks.
  StackHandleScope<kNumberOfLocks + 1> hs(self);
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
  // This should be an illegal monitor state.
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after2 = obj->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);

  LockWord lock_after3 = obj->GetLockWord(false);
  LockWord::LockState new_state3 = lock_after3.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);

  // Stress test:
  // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
  // each step.

  RandGen r(0x1234);

  constexpr size_t kIterations = 10000;  // Number of iterations.
  constexpr size_t kMoveToFat = 1000;    // Chance of 1:kMoveToFat to make a lock fat.

  size_t counts[kNumberOfLocks];
  bool fat[kNumberOfLocks];  // Whether a lock should be thin or fat.
  Handle<mirror::String> objects[kNumberOfLocks];

  // Initialize = allocate.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    counts[i] = 0;
    fat[i] = false;
    objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
  }

  for (size_t i = 0; i < kIterations; ++i) {
    // Select which lock to update.
    size_t index = r.next() % kNumberOfLocks;

    // Make lock fat?
    if (!fat[index] && (r.next() % kMoveToFat == 0)) {
      fat[index] = true;
      objects[index]->IdentityHashCode();

      LockWord lock_iter = objects[index]->GetLockWord(false);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (counts[index] == 0) {
        EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
      } else {
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
      }
    } else {
      bool lock;  // Whether to lock or unlock in this step.
      if (counts[index] == 0) {
        lock = true;
      } else if (counts[index] == kThinLockLoops) {
        lock = false;
      } else {
        // Randomly.
        lock = r.next() % 2 == 0;
      }

      if (lock) {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);
        counts[index]++;
      } else {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
        counts[index]--;
      }

      EXPECT_FALSE(self->IsExceptionPending());

      // Check the new state.
      LockWord lock_iter = objects[index]->GetLockWord(true);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (fat[index]) {
        // Abuse MonitorInfo.
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
        MonitorInfo info(objects[index].Get());
        EXPECT_EQ(counts[index], info.entry_count_) << index;
      } else {
        if (counts[index] > 0) {
          EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
          EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
        } else {
          EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
        }
      }
    }
  }

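  // Invariant maintained above: counts[index] is the expected recursion depth. Note the
  // off-by-one convention the checks rely on: a thin lock word stores depth - 1 in
  // ThinLockCount(), while a fat lock's Monitor reports the full depth as entry_count_.
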
  // Unlock the remaining count times and then check it's unlocked. Then deallocate.
  // Go in reverse order to correctly handle Handles.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    size_t index = kNumberOfLocks - 1 - i;
    size_t count = counts[index];
    while (count > 0) {
      test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                    reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
      count--;
    }

    LockWord lock_after4 = objects[index]->GetLockWord(false);
    LockWord::LockState new_state4 = lock_after4.GetState();
    EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
                || LockWord::LockState::kFatLocked == new_state4);
  }

  // Test done.
#else
  LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, UnlockObject) {
  TestUnlockObject(this);
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_check_cast(void);
#endif

TEST_F(StubTest, CheckCast) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();
  // Find some classes.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
  Handle<mirror::Class> c2(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

  // TODO: Make the following work. But that would require correct managed frames.

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

#else
  LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

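// The expectations above encode array covariance: a String[] may stand in for an Object[], so
// that cast succeeds, while casting an Object[] down to String[] must fail, observed here as a
// pending exception that the test then clears.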

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_aput_obj_with_null_and_bound_check(void);
// Do not check non-checked ones, we'd need handlers and stuff...
#endif

TEST_F(StubTest, APutObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();
  // Create an object.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<5> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
  Handle<mirror::Class> ca(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  // Build a string array of size 10.
  Handle<mirror::ObjectArray<mirror::Object>> array(
      hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));

  // Build a string -> should be assignable.
  Handle<mirror::String> str_obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  // Build a generic object -> should fail assigning.
  Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));

  // Play with it...

  // 1) Success cases
  // 1.1) Assign str_obj to array[0..3]

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(3));

  // 1.2) Assign null to array[0..3]

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(3));

  // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.

  // 2) Failure cases (str into str[])
  // 2.1) Array = null
  // TODO: Throwing NPE needs actual DEX code.

//  Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
//          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
//
//  EXPECT_TRUE(self->IsExceptionPending());
//  self->ClearException();

  // 2.2) Index < 0

  Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
          reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 2.3) Index >= length

  Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 3) Failure cases (obj into str[])

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // Tests done.
#else
  LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

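// As its name suggests, the aput-obj stub used above folds three checks into one entrypoint: a
// null check on the array, a bounds check on the index, and an assignability check on the
// stored reference. Storing null is always allowed, which is why the null stores succeed.
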
TEST_F(StubTest, AllocObject) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs.

  Thread* self = Thread::Current();
  // Create an object.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // Use an arbitrary method from c to use as referrer.
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            reinterpret_cast<size_t>(c->GetVirtualMethod(0)),  // arbitrary
                            0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObject),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases).
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases).
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K.
    size_t length = 128 * KB / 4;
    while (length > 10) {
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h.Get() == nullptr) {
        self->ClearException();

        // Try a smaller length.
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h.Get() != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

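// The OOM path above works by capping the ideal heap footprint, filling the heap first with
// progressively smaller object arrays and then with single objects, and finally expecting the
// allocation entrypoint itself to fail cleanly: a pending exception and a null result, not a
// crash.
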
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001126TEST_F(StubTest, AllocObjectArray) {
1127 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1128
Ian Rogersc3ccc102014-06-25 11:52:14 -07001129#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001130 // TODO: Check the "Unresolved" allocation stubs
1131
1132 Thread* self = Thread::Current();
1133 // Create an object
1134 ScopedObjectAccess soa(self);
1135 // garbage is created during ClassLinker::Init
1136
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001137 StackHandleScope<2> hs(self);
1138 Handle<mirror::Class> c(
1139 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001140
1141 // Needed to have a linked method.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001142 Handle<mirror::Class> c_obj(
1143 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001144
1145 // Play with it...
1146
1147 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001148
1149 // For some reason this does not work, as the type_idx is artificial and outside what the
1150 // resolved types of c_obj allow...
1151
1152 if (false) {
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001153 // Use an arbitrary method from c to use as referrer
1154 size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()), // type_idx
1155 reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0)), // arbitrary
1156 10U,
Mathieu Chartier119c6bd2014-05-09 14:11:47 -07001157 reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArray),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001158 self);
1159
1160 EXPECT_FALSE(self->IsExceptionPending());
1161 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1162 mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001163 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001164 VerifyObject(obj);
1165 EXPECT_EQ(obj->GetLength(), 10);
1166 }
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001167
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001168 {
1169 // We can use nullptr in the second argument as we do not need a method here (not used in
1170 // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr),
                            10U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArrayResolved),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory.
  {
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr),
                            GB,  // that should fail...
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArrayResolved),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_string_compareto(void);
#endif

TEST_F(StubTest, StringCompareTo) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings.
  // Use an array so we can index into it and use a matrix for expected results.
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  static constexpr size_t kBaseStringCount = 8;
  const char* c[kBaseStringCount] = { "", "", "a", "aa", "ab",
      "aacaacaacaacaacaac",  // This one's under the default limit to go to __memcmp16.
      "aacaacaacaacaacaacaacaacaacaacaacaac",     // This one's over.
      "aacaacaacaacaacaacaacaacaacaacaacaaca" };  // As is this one. We need a separate one to
                                                  // defeat object-equal optimizations.

  static constexpr size_t kStringCount = 2 * kBaseStringCount;

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kBaseStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
  }

  RandGen r(0x1234);

  for (size_t i = kBaseStringCount; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i - kBaseStringCount]));
    int32_t length = s[i]->GetLength();
    if (length > 1) {
      // Set a random offset and length.
      int32_t new_offset = 1 + (r.next() % (length - 1));
      int32_t rest = length - new_offset - 1;
      int32_t new_length = 1 + (rest > 0 ? r.next() % rest : 0);

      s[i]->SetField32<false>(mirror::String::CountOffset(), new_length);
      s[i]->SetField32<false>(mirror::String::OffsetOffset(), new_offset);
    }
  }
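  // (Poking the count/offset fields directly turns the second-half strings into proper
  // substrings of their backing char arrays without reallocating; this relies on the
  // offset/count-based String layout used here.)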

  // TODO: wide characters

  // Matrix of expectations. The first component is the first parameter. Note we only check
  // against the sign, not the exact value. As we are testing random offsets, we need to compute
  // this here and rely on String::CompareTo being correct.
  int32_t expected[kStringCount][kStringCount];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      expected[x][y] = s[x]->CompareTo(s[y].Get());
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      // Test string_compareto x y
      size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
                              reinterpret_cast<size_t>(s[y].Get()), 0U,
                              reinterpret_cast<uintptr_t>(&art_quick_string_compareto), self);

      EXPECT_FALSE(self->IsExceptionPending());

      // The result is a 32-bit signed integer.
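      // (The stub hands its int32_t back in a pointer-width register, so we recover it by
      // type-punning through a union rather than a narrowing cast.)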
      union {
        size_t r;
        int32_t i;
      } conv;
      conv.r = result;
      int32_t e = expected[x][y];
      EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
      std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_set32_static(void);
extern "C" void art_quick_get32_static(void);
#endif

static void GetSet32Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                           mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  constexpr size_t num_values = 7;
  uint32_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
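  // (These values straddle the byte, short, and sign boundaries, so a stub that narrows or
  // sign-extends incorrectly should fail at least one iteration.)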

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              reinterpret_cast<uintptr_t>(&art_quick_set32_static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           reinterpret_cast<uintptr_t>(&art_quick_get32_static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_set32_instance(void);
extern "C" void art_quick_get32_instance(void);
#endif

static void GetSet32Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  constexpr size_t num_values = 7;
  uint32_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

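  // Each iteration cross-checks both directions: write through the stub and read the field back
  // directly, then bump the value directly and read it back through the stub.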
  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              reinterpret_cast<uintptr_t>(&art_quick_set32_instance),
                              self,
                              referrer);

    int32_t res = f->Get()->GetInt(obj->Get());
    EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;

    res++;
    f->Get()->SetInt<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            reinterpret_cast<uintptr_t>(&art_quick_get32_instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int32_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_set_obj_static(void);
extern "C" void art_quick_get_obj_static(void);

static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
                                 mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                            reinterpret_cast<size_t>(val),
                            0U,
                            reinterpret_cast<uintptr_t>(&art_quick_set_obj_static),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                                         0U, 0U,
                                         reinterpret_cast<uintptr_t>(&art_quick_get_obj_static),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
}
#endif

static void GetSetObjStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                            mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_static((*f)->GetDexFieldIndex(), str, self, referrer, test);

  set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);
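  // (The null -> str -> null sequence checks both installing and clearing a reference through
  // the stubs.)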
#else
  LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_set_obj_instance(void);
extern "C" void art_quick_get_obj_instance(void);

static void set_and_check_instance(Handle<mirror::ArtField>* f, mirror::Object* trg,
                                   mirror::Object* val, Thread* self, mirror::ArtMethod* referrer,
                                   StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                            reinterpret_cast<size_t>(trg),
                            reinterpret_cast<size_t>(val),
                            reinterpret_cast<uintptr_t>(&art_quick_set_obj_instance),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                         reinterpret_cast<size_t>(trg),
                                         0U,
                                         reinterpret_cast<uintptr_t>(&art_quick_get_obj_instance),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;

  EXPECT_EQ(val, f->Get()->GetObj(trg));
}
#endif

static void GetSetObjInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                              Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_instance(f, obj->Get(), str, self, referrer, test);

  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
#else
  LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


// TODO: Complete these tests for 32-bit architectures.

#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
extern "C" void art_quick_set64_static(void);
extern "C" void art_quick_get64_static(void);
#endif

static void GetSet64Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                           mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
  constexpr size_t num_values = 8;
  uint64_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
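  // (The last value does not fit in 32 bits, so a stub that silently truncates its argument or
  // result would fail that iteration.)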

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3UWithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                               values[i],
                               reinterpret_cast<uintptr_t>(&art_quick_set64_static),
                               self,
                               referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           reinterpret_cast<uintptr_t>(&art_quick_get64_static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
extern "C" void art_quick_set64_instance(void);
extern "C" void art_quick_get64_instance(void);
#endif

static void GetSet64Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
  constexpr size_t num_values = 8;
  uint64_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              reinterpret_cast<uintptr_t>(&art_quick_set64_instance),
                              self,
                              referrer);

    int64_t res = f->Get()->GetLong(obj->Get());
    EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;

    res++;
    f->Get()->SetLong<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            reinterpret_cast<uintptr_t>(&art_quick_get64_instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int64_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
  // garbage is created during ClassLinker::Init

  JNIEnv* env = Thread::Current()->GetJniEnv();
  jclass jc = env->FindClass("AllFields");
  CHECK(jc != nullptr);
  jobject o = env->AllocObject(jc);
  CHECK(o != nullptr);
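  // (AllFields is a test-only dex class loaded via LoadDex in the callers; it is assumed to
  // declare static and instance fields covering each primitive and reference type, which the
  // loops below iterate over.)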

  ScopedObjectAccess soa(self);
  StackHandleScope<5> hs(self);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
  Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
  // Need a method as a referrer.
  Handle<mirror::ArtMethod> m(hs.NewHandle(c->GetDirectMethod(0)));

  // Play with it...

  // Static fields.
  {
    Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetSFields()));
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      StackHandleScope<1> hs(self);
      Handle<mirror::ArtField> f(hs.NewHandle(fields->Get(i)));

      Primitive::Type type = f->GetTypeAsPrimitiveType();
      switch (type) {
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Static(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Static(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Don't try array fields.
          if (test_type == type && f->GetTypeDescriptor()[0] != '[') {
            GetSetObjStatic(&obj, &f, self, m.Get(), test);
          }
          break;

        default:
          break;  // Skip.
      }
    }
  }

  // Instance fields.
  {
    Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetIFields()));
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      StackHandleScope<1> hs(self);
      Handle<mirror::ArtField> f(hs.NewHandle(fields->Get(i)));

      Primitive::Type type = f->GetTypeAsPrimitiveType();
      switch (type) {
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Instance(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Instance(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Don't try array fields.
          if (test_type == type && f->GetTypeDescriptor()[0] != '[') {
            GetSetObjInstance(&obj, &f, self, m.Get(), test);
          }
          break;

        default:
          break;  // Skip.
      }
    }
  }

  // TODO: Deallocate things.
}


TEST_F(StubTest, Fields32) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);
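  // (Presumably the runtime must actually be started here so the AllFields class can be
  // resolved and initialized before TestFields pokes at its fields.)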

  TestFields(self, this, Primitive::Type::kPrimInt);
}

TEST_F(StubTest, FieldsObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimNot);
}

TEST_F(StubTest, Fields64) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimLong);
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_imt_conflict_trampoline(void);
#endif

TEST_F(StubTest, IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod = env->GetMethodID(arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get mirror representation.
  Handle<mirror::ArtMethod> contains_amethod(hs.NewHandle(soa.DecodeMethod(contains_jmethod)));

  // Patch up ArrayList.contains.
  if (contains_amethod.Get()->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod.Get()->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        GetTlsPtr(self)->quick_entrypoints.pQuickToInterpreterBridge));
  }
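  // (If contains has no compiled code, e.g. when running interpreted-only, pointing its
  // entrypoint at the quick-to-interpreter bridge lets the stub still dispatch into it.)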

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod = env->GetMethodID(list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get mirror representation.
  Handle<mirror::ArtMethod> inf_contains(hs.NewHandle(soa.DecodeMethod(inf_contains_jmethod)));

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Sanity check: check that there is a conflict for List.contains in ArrayList.

  mirror::Class* arraylist_class = soa.Decode<mirror::Class*>(arraylist_jclass);
  mirror::ArtMethod* m = arraylist_class->GetEmbeddedImTableEntry(
      inf_contains->GetDexMethodIndex() % mirror::Class::kImtSize);

  if (!m->IsImtConflictMethod()) {
    LOG(WARNING) << "Test is meaningless, no IMT conflict in setup: " <<
        PrettyMethod(m, true);
    LOG(WARNING) << "Please update StubTest.IMT.";
    return;
  }
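  // (The embedded IMT has kImtSize slots indexed by dex method index modulo the table size; a
  // slot shared by several interface methods holds the conflict method, and calls to it go
  // through the conflict trampoline exercised below.)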

  // Create instances.

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object*>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(jobj)));

  // Invoke.

  size_t result =
      Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   reinterpret_cast<uintptr_t>(&art_quick_imt_conflict_trampoline),
                                   self, contains_amethod.Get(),
                                   static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));

  // Invoke again.

  result = Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                        reinterpret_cast<size_t>(obj.Get()),
                                        reinterpret_cast<uintptr_t>(&art_quick_imt_conflict_trampoline),
                                        self, contains_amethod.Get(),
                                        static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

#if defined(__arm__) || defined(__aarch64__)
extern "C" void art_quick_indexof(void);
#endif

TEST_F(StubTest, StringIndexOf) {
#if defined(__arm__) || defined(__aarch64__)
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings.
  // Use an array so we can index into it and use a matrix for expected results.
  // TODO: Shared backing arrays.
  static constexpr size_t kStringCount = 7;
  const char* c_str[kStringCount] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
  static constexpr size_t kCharCount = 5;
  const char c_char[kCharCount] = { 'a', 'b', 'c', 'd', 'e' };

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
  }

  // Matrix of expectations. The first component is the first parameter. Here we check the exact
  // return value, computing the expectations with String::FastIndexOf and relying on it being
  // correct.
  static constexpr size_t kMaxLen = 9;
  DCHECK_LE(strlen(c_str[kStringCount - 1]), kMaxLen) << "Please fix the indexof test.";

  // Last dimension: start, offset by 1.
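  // (start = z - 1 below, so starts range over [-1, kMaxLen + 1], deliberately probing an
  // out-of-range start on both ends.)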
  int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
      }
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        int32_t start = static_cast<int32_t>(z) - 1;

        // Test the indexof stub for s[x], c_char[y], start.
        size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
                                reinterpret_cast<uintptr_t>(&art_quick_indexof), self);

        EXPECT_FALSE(self->IsExceptionPending());

        // The result is a 32-bit signed integer.
        union {
          size_t r;
          int32_t i;
        } conv;
        conv.r = result;

        EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
            c_char[y] << " @ " << start;
      }
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

}  // namespace art