/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "common_runtime_test.h"
#include "mirror/art_field-inl.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/string-inl.h"

#include <cstdio>

namespace art {


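// StubTest exercises ART's hand-written assembly entrypoints ("stubs")
// directly. The Invoke3* helpers below use inline assembly to marshal up to
// three word-sized arguments into the managed calling convention of the
// target architecture, call the stub, and return the result, so the tests
// can check both behavior and register preservation.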
class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(type), type);
        }
      }
    }
  }

  void SetUpRuntimeOptions(Runtime::Options *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             mirror::ArtMethod* referrer) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
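    // Convention exercised here, per the constraints below: arg0/arg1/arg2
    // arrive in eax/ecx/edx and the stub address in edi; the referrer goes on
    // the stack, with subl $12 plus the 4-byte pushl keeping %esp 16-byte
    // aligned at the call.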
    __asm__ __volatile__(
        "subl $12, %%esp\n\t"       // Align stack.
        "pushl %[referrer]\n\t"     // Store referrer.
        "call *%%edi\n\t"           // Call the stub
        "addl $16, %%esp"           // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #20\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "add sp, sp, #20\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop referrer and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"       // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t" // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "str %[self], [sp, #32]\n\t"

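        // Note: d8-d15 are callee-saved in AAPCS64, so a correct stub must
        // preserve them. Write a known pattern into them here and re-check it
        // after the call; x9 (in the clobber list) records the verdict.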
        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldr x18, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"


        "blr x3\n\t"                // Call the stub
        "mov x8, x0\n\t"            // Store result
        "add sp, sp, #16\n\t"       // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"            // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"      // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"       // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t" // Store the FPR comparison result
        "mov %[result], x8\n\t"     // Store the call result

        "b 3f\n\t"                  // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                  // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__)
    // Note: Uses the native convention
    // TODO: Set the thread?
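    // Args travel in rdi/rsi/rdx and the stub address in rax, per the
    // constraints below. The second pushq re-pushes the referrer already on
    // the stack, doubling as the 16-byte alignment padding noted inline.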
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"     // Push referrer
        "pushq (%%rsp)\n\t"         // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"           // Call the stub
        "addq $16, %%rsp\n\t"       // Pop referrer and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "m"(referrer)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbx", "rcx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // TODO: Set up a frame according to referrer's specs.
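  // Same as Invoke3WithReferrer, but also passes a "hidden" argument outside
  // the regular argument registers: in xmm0 on x86 and x86-64, r12 on ARM,
  // and x12 on ARM64 (see the per-architecture blocks below).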
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, mirror::ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "movd %[hidden], %%xmm0\n\t"
        "subl $12, %%esp\n\t"       // Align stack.
        "pushl %[referrer]\n\t"     // Store referrer
        "call *%%edi\n\t"           // Call the stub
        "addl $16, %%esp"           // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"m"(referrer), [hidden]"r"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop referrer and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"       // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t" // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
399 "ldp x18, x12, [sp, #32]\n\t"
Andreas Gampe51f76352014-05-21 08:28:48 -0700400 "add sp, sp, #48\n\t"
401 ".cfi_adjust_cfa_offset -48\n\t"
402
Andreas Gampe51f76352014-05-21 08:28:48 -0700403 "blr x3\n\t" // Call the stub
Andreas Gampef39b3782014-06-03 14:38:30 -0700404 "mov x8, x0\n\t" // Store result
405 "add sp, sp, #16\n\t" // Drop the quick "frame"
406 ".cfi_adjust_cfa_offset -16\n\t"
Andreas Gampe51f76352014-05-21 08:28:48 -0700407
408 // Test d8 - d15. We can use x1 and x2.
409 "movk x1, #0xfad0\n\t"
410 "movk x1, #0xebad, lsl #16\n\t"
411 "movk x1, #0xfad0, lsl #32\n\t"
412 "movk x1, #0xebad, lsl #48\n\t"
413 "fmov x2, d8\n\t"
414 "cmp x1, x2\n\t"
415 "b.ne 1f\n\t"
416 "add x1, x1, 1\n\t"
417
418 "fmov x2, d9\n\t"
419 "cmp x1, x2\n\t"
420 "b.ne 1f\n\t"
421 "add x1, x1, 1\n\t"
422
423 "fmov x2, d10\n\t"
424 "cmp x1, x2\n\t"
425 "b.ne 1f\n\t"
426 "add x1, x1, 1\n\t"
427
428 "fmov x2, d11\n\t"
429 "cmp x1, x2\n\t"
430 "b.ne 1f\n\t"
431 "add x1, x1, 1\n\t"
432
433 "fmov x2, d12\n\t"
434 "cmp x1, x2\n\t"
435 "b.ne 1f\n\t"
436 "add x1, x1, 1\n\t"
437
438 "fmov x2, d13\n\t"
439 "cmp x1, x2\n\t"
440 "b.ne 1f\n\t"
441 "add x1, x1, 1\n\t"
442
443 "fmov x2, d14\n\t"
444 "cmp x1, x2\n\t"
445 "b.ne 1f\n\t"
446 "add x1, x1, 1\n\t"
447
448 "fmov x2, d15\n\t"
449 "cmp x1, x2\n\t"
450 "b.ne 1f\n\t"
451
Andreas Gampef39b3782014-06-03 14:38:30 -0700452 "mov x9, #0\n\t" // Use x9 as flag, in clobber list
Andreas Gampe51f76352014-05-21 08:28:48 -0700453
454 // Finish up.
455 "2:\n\t"
Andreas Gampef39b3782014-06-03 14:38:30 -0700456 "ldp x0, x1, [sp]\n\t" // Restore stuff not named clobbered, may contain fpr_result
457 "ldp x2, x3, [sp, #16]\n\t"
458 "ldp x4, x5, [sp, #32]\n\t"
459 "ldp x6, x7, [sp, #48]\n\t"
460 "add sp, sp, #64\n\t" // Free stack space, now sp as on entry
Andreas Gampe51f76352014-05-21 08:28:48 -0700461 ".cfi_adjust_cfa_offset -64\n\t"
462
Andreas Gampef39b3782014-06-03 14:38:30 -0700463 "str x9, %[fpr_result]\n\t" // Store the FPR comparison result
464 "mov %[result], x8\n\t" // Store the call result
465
Andreas Gampe51f76352014-05-21 08:28:48 -0700466 "b 3f\n\t" // Goto end
467
468 // Failed fpr verification.
469 "1:\n\t"
Andreas Gampef39b3782014-06-03 14:38:30 -0700470 "mov x9, #1\n\t"
Andreas Gampe51f76352014-05-21 08:28:48 -0700471 "b 2b\n\t" // Goto finish-up
472
473 // End
474 "3:\n\t"
Andreas Gampef39b3782014-06-03 14:38:30 -0700475 : [result] "=r" (result)
476 // Use the result from r0
Andreas Gampe51f76352014-05-21 08:28:48 -0700477 : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
Andreas Gampef39b3782014-06-03 14:38:30 -0700478 [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
479 : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
480 "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
481 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
482 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
483 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
Andreas Gampe2f6e3512014-06-07 01:32:33 -0700484 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
485 "memory"); // clobber.
Ian Rogersc3ccc102014-06-25 11:52:14 -0700486#elif defined(__x86_64__) && !defined(__APPLE__)
Andreas Gampe51f76352014-05-21 08:28:48 -0700487 // Note: Uses the native convention
488 // TODO: Set the thread?
489 __asm__ __volatile__(
490 "movq %[hidden], %%r9\n\t" // No need to save r9, listed as clobbered
491 "movd %%r9, %%xmm0\n\t"
492 "pushq %[referrer]\n\t" // Push referrer
493 "pushq (%%rsp)\n\t" // & 16B alignment padding
494 ".cfi_adjust_cfa_offset 16\n\t"
495 "call *%%rax\n\t" // Call the stub
496 "addq $16, %%rsp\n\t" // Pop nullptr and padding
497 ".cfi_adjust_cfa_offset -16\n\t"
498 : "=a" (result)
499 // Use the result from rax
500 : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "m"(referrer), [hidden] "m"(hidden)
501 // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
Andreas Gampe2f6e3512014-06-07 01:32:33 -0700502 : "rbx", "rcx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
503 "memory"); // clobber all
Andreas Gampe51f76352014-05-21 08:28:48 -0700504 // TODO: Should we clobber the other registers?
505#else
506 LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
507 result = 0;
508#endif
509 // Pop transition.
510 self->PopManagedStackFragment(fragment);
511
512 fp_result = fpr_result;
513 EXPECT_EQ(0U, fp_result);
514
515 return result;
516 }
517
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700518 // Method with 32b arg0, 64b arg1
519 size_t Invoke3UWithReferrer(size_t arg0, uint64_t arg1, uintptr_t code, Thread* self,
520 mirror::ArtMethod* referrer) {
Ian Rogersc3ccc102014-06-25 11:52:14 -0700521#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700522 // Just pass through.
523 return Invoke3WithReferrer(arg0, arg1, 0U, code, self, referrer);
524#else
525 // Need to split up arguments.
526 uint32_t lower = static_cast<uint32_t>(arg1 & 0xFFFFFFFF);
527 uint32_t upper = static_cast<uint32_t>((arg1 >> 32) & 0xFFFFFFFF);
528
529 return Invoke3WithReferrer(arg0, lower, upper, code, self, referrer);
530#endif
531 }
532
533 // Method with 32b arg0, 32b arg1, 64b arg2
534 size_t Invoke3UUWithReferrer(uint32_t arg0, uint32_t arg1, uint64_t arg2, uintptr_t code,
535 Thread* self, mirror::ArtMethod* referrer) {
Ian Rogersc3ccc102014-06-25 11:52:14 -0700536#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700537 // Just pass through.
538 return Invoke3WithReferrer(arg0, arg1, arg2, code, self, referrer);
539#else
540 // TODO: Needs 4-param invoke.
541 return 0;
542#endif
543 }
Andreas Gampe6cf80102014-05-19 11:32:41 -0700544
545 protected:
546 size_t fp_result;
Andreas Gampe525cde22014-04-22 15:44:50 -0700547};
548
549
Ian Rogersc3ccc102014-06-25 11:52:14 -0700550#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700551extern "C" void art_quick_memcpy(void);
552#endif
553
554TEST_F(StubTest, Memcpy) {
Ian Rogersc3ccc102014-06-25 11:52:14 -0700555#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700556 Thread* self = Thread::Current();
557
558 uint32_t orig[20];
559 uint32_t trg[20];
560 for (size_t i = 0; i < 20; ++i) {
561 orig[i] = i;
562 trg[i] = 0;
563 }
564
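  // Copy 10 uint32_t (40 bytes) from orig[4..13] into trg[4..13]; everything
  // outside that window must remain untouched.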
  Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
          10 * sizeof(uint32_t), reinterpret_cast<uintptr_t>(&art_quick_memcpy), self);

  EXPECT_EQ(orig[0], trg[0]);

  for (size_t i = 1; i < 4; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  for (size_t i = 4; i < 14; ++i) {
    EXPECT_EQ(orig[i], trg[i]);
  }

  for (size_t i = 14; i < 20; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  // TODO: Test overlapping?

#else
  LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_lock_object(void);
#endif

TEST_F(StubTest, LockObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
          reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
  EXPECT_EQ(lock_after.ThinLockCount(), 0U);  // Thin lock starts count at zero

  for (size_t i = 1; i < kThinLockLoops; ++i) {
    Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
            reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

    // Check we're at lock count i

    LockWord l_inc = obj->GetLockWord(false);
    LockWord::LockState l_inc_state = l_inc.GetState();
    EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
    EXPECT_EQ(l_inc.ThinLockCount(), i);
  }

  // Force a fat lock by running identity hashcode to fill up lock word.
  Handle<mirror::String> obj2(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  obj2->IdentityHashCode();

  Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U,
          reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after2 = obj2->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
  EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));

  // Test done.
#else
  LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


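// A tiny deterministic pseudo-random generator (a MINSTD-style multiplicative
// step, multiplier 48271, plus a small offset) so the stress test below is
// reproducible from its fixed seed.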
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  uint32_t next() {
    val_ = val_ * 48271 % 2147483647 + 13;
    return val_;
  }

  uint32_t val_;
};


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_lock_object(void);
extern "C" void art_quick_unlock_object(void);
#endif

// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init
  static constexpr size_t kNumberOfLocks = 10;  // Number of objects = number of locks.
  StackHandleScope<kNumberOfLocks + 1> hs(self);
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
  // This should be an illegal monitor state.
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after2 = obj->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);

  LockWord lock_after3 = obj->GetLockWord(false);
  LockWord::LockState new_state3 = lock_after3.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);

  // Stress test:
  // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
  // each step.

  RandGen r(0x1234);

  constexpr size_t kIterations = 10000;  // Number of iterations
  constexpr size_t kMoveToFat = 1000;    // Chance of 1:kMoveToFat to make a lock fat.

  size_t counts[kNumberOfLocks];
  bool fat[kNumberOfLocks];  // Whether a lock should be thin or fat.
  Handle<mirror::String> objects[kNumberOfLocks];

  // Initialize = allocate.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    counts[i] = 0;
    fat[i] = false;
    objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
  }

  for (size_t i = 0; i < kIterations; ++i) {
    // Select which lock to update.
    size_t index = r.next() % kNumberOfLocks;

    // Make lock fat?
    if (!fat[index] && (r.next() % kMoveToFat == 0)) {
      fat[index] = true;
      objects[index]->IdentityHashCode();

      LockWord lock_iter = objects[index]->GetLockWord(false);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (counts[index] == 0) {
        EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
      } else {
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
      }
    } else {
      bool lock;  // Whether to lock or unlock in this step.
      if (counts[index] == 0) {
        lock = true;
      } else if (counts[index] == kThinLockLoops) {
        lock = false;
      } else {
        // Randomly.
        lock = r.next() % 2 == 0;
      }

      if (lock) {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);
        counts[index]++;
      } else {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
        counts[index]--;
      }

      EXPECT_FALSE(self->IsExceptionPending());

      // Check the new state.
      LockWord lock_iter = objects[index]->GetLockWord(true);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (fat[index]) {
        // Abuse MonitorInfo.
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
        MonitorInfo info(objects[index].Get());
        EXPECT_EQ(counts[index], info.entry_count_) << index;
      } else {
        if (counts[index] > 0) {
          EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
          EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
        } else {
          EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
        }
      }
    }
  }

  // Unlock the remaining count times and then check it's unlocked. Then deallocate.
  // Go reverse order to correctly handle Handles.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    size_t index = kNumberOfLocks - 1 - i;
    size_t count = counts[index];
    while (count > 0) {
      test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                    reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
      count--;
    }

    LockWord lock_after4 = objects[index]->GetLockWord(false);
    LockWord::LockState new_state4 = lock_after4.GetState();
    EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
                || LockWord::LockState::kFatLocked == new_state4);
  }

  // Test done.
#else
  LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, UnlockObject) {
  TestUnlockObject(this);
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_check_cast(void);
#endif

TEST_F(StubTest, CheckCast) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();
  // Find some classes.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
  Handle<mirror::Class> c2(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

  // TODO: Make the following work. But that would require correct managed frames.

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

#else
  LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_aput_obj_with_null_and_bound_check(void);
// Do not check non-checked ones, we'd need handlers and stuff...
#endif

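// The checked aput stub dispatched below is expected to bounds-check the
// index and type-check the stored reference; the null-array case is left as
// a TODO in the test since throwing the NPE needs actual dex code.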
TEST_F(StubTest, APutObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<5> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
  Handle<mirror::Class> ca(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  // Build a string array of size 10
  Handle<mirror::ObjectArray<mirror::Object>> array(
      hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));

  // Build a string -> should be assignable
  Handle<mirror::String> str_obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  // Build a generic object -> should fail assigning
  Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));

  // Play with it...

  // 1) Success cases
  // 1.1) Assign str_obj to array[0..3]

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(3));

  // 1.2) Assign null to array[0..3]

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(3));

  // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.

  // 2) Failure cases (str into str[])
  // 2.1) Array = null
  // TODO: Throwing NPE needs actual DEX code

//  Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
//          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
//
//  EXPECT_TRUE(self->IsExceptionPending());
//  self->ClearException();

  // 2.2) Index < 0

  Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
          reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 2.3) Index >= length

  Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 3) Failure cases (obj into str[])

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // Tests done.
#else
  LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, AllocObject) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            reinterpret_cast<size_t>(c->GetVirtualMethod(0)),  // arbitrary
                            0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObject),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
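    // The runtime was started with -Xmx4M (see SetUpRuntimeOptions), so the
    // heap is quick to exhaust: bump the ideal footprint to the cap, fill
    // memory with arrays and then single objects, and expect the allocation
    // entrypoint to leave a pending exception.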
1068 Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);
1069
1070 // Array helps to fill memory faster.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001071 Handle<mirror::Class> ca(
1072 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
1073
1074 // Use arbitrary large amount for now.
1075 static const size_t kMaxHandles = 1000000;
Ian Rogers700a4022014-05-19 16:49:03 -07001076 std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001077
1078 std::vector<Handle<mirror::Object>> handles;
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001079 // Start allocating with 128K
1080 size_t length = 128 * KB / 4;
1081 while (length > 10) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001082 Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
1083 mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
1084 if (self->IsExceptionPending() || h.Get() == nullptr) {
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001085 self->ClearException();
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001086
1087 // Try a smaller length
1088 length = length / 8;
1089 // Use at most half the reported free space.
1090 size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
1091 if (length * 8 > mem) {
1092 length = mem / 8;
1093 }
1094 } else {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001095 handles.push_back(h);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001096 }
1097 }
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001098 LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001099
1100 // Allocate simple objects till it fails.
1101 while (!self->IsExceptionPending()) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001102 Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
1103 if (!self->IsExceptionPending() && h.Get() != nullptr) {
1104 handles.push_back(h);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001105 }
1106 }
1107 self->ClearException();
1108
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001109 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
Mathieu Chartier119c6bd2014-05-09 14:11:47 -07001110 reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectInitialized),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001111 self);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001112 EXPECT_TRUE(self->IsExceptionPending());
1113 self->ClearException();
1114 EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001115 }
1116
1117 // Tests done.
1118#else
1119 LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
1120 // Force-print to std::cout so it's also outside the logcat.
1121 std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
1122#endif
1123}
1124
TEST_F(StubTest, AllocObjectArray) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object array.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(self);
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

  // Needed to have a linked method.
  Handle<mirror::Class> c_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());

  // For some reason this does not work, as the type_idx is artificial and outside what the
  // resolved types of c_obj allow...

  if (false) {
    // Use an arbitrary method from c_obj as the referrer.
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0)),  // arbitrary
                            10U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArray),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    EXPECT_EQ(obj->GetLength(), 10);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (it is not used
    // in the resolved/initialized cases).
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr),
                            10U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArrayResolved),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory.
  {
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr),
                            GB,  // that should fail...
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArrayResolved),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


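// The assembly stubs below are declared as extern "C" functions with empty signatures purely so
// that the tests can take their addresses; they are never called through these C prototypes.
// Argument passing is handled by the hand-written Invoke3* trampolines instead.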
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_string_compareto(void);
#endif

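// Compares every pair of test strings through art_quick_string_compareto and checks the sign of
// the stub's result against String::CompareTo. The second half of the string array is patched to
// use non-zero offset/count fields so the stub's offset handling is covered as well.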
TEST_F(StubTest, StringCompareTo) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  static constexpr size_t kBaseStringCount = 8;
  const char* c[kBaseStringCount] = { "", "", "a", "aa", "ab",
      "aacaacaacaacaacaac",  // This one's under the default limit to go to __memcmp16.
      "aacaacaacaacaacaacaacaacaacaacaacaac",  // This one's over.
      "aacaacaacaacaacaacaacaacaacaacaacaaca" };  // As is this one. We need a separate one to
                                                  // defeat object-equal optimizations.

  static constexpr size_t kStringCount = 2 * kBaseStringCount;

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kBaseStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
  }

  RandGen r(0x1234);

  for (size_t i = kBaseStringCount; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i - kBaseStringCount]));
    int32_t length = s[i]->GetLength();
    if (length > 1) {
      // Set a random offset and length.
      int32_t new_offset = 1 + (r.next() % (length - 1));
      int32_t rest = length - new_offset - 1;
      int32_t new_length = 1 + (rest > 0 ? r.next() % rest : 0);

      s[i]->SetField32<false>(mirror::String::CountOffset(), new_length);
      s[i]->SetField32<false>(mirror::String::OffsetOffset(), new_offset);
    }
  }

  // TODO: wide characters

  // Matrix of expectations. First component is the first parameter. Note we only check against
  // the sign, not the exact value. As we are testing random offsets, we need to compute the
  // expected values here and rely on String::CompareTo being correct.
  int32_t expected[kStringCount][kStringCount];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      expected[x][y] = s[x]->CompareTo(s[y].Get());
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      // Test string_compareto x y
      size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
                              reinterpret_cast<size_t>(s[y].Get()), 0U,
                              reinterpret_cast<uintptr_t>(&art_quick_string_compareto), self);

      EXPECT_FALSE(self->IsExceptionPending());

      // The result is a 32-bit signed integer.
      union {
        size_t r;
        int32_t i;
      } conv;
      conv.r = result;
      int32_t e = expected[x][y];
      // Note: index the failure messages with x % kBaseStringCount (same for y), as the second
      // half of the string array has no C-string counterpart of its own.
      EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x % kBaseStringCount] << " y="
          << c[y % kBaseStringCount] << " res=" << conv.r;
      EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x % kBaseStringCount] << " y="
          << c[y % kBaseStringCount] << " res=" << conv.r;
      EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x % kBaseStringCount] << " y="
          << c[y % kBaseStringCount] << " res=" << conv.r;
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
      std::endl;
#endif
}


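// Round-trips a set of 32-bit values through the static field stubs: art_quick_set32_static
// stores each value, art_quick_get32_static loads it back, and the two must agree. The unused
// obj parameter keeps the signature in line with the instance-field helpers so that TestFields
// below can dispatch to either kind uniformly.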
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_set32_static(void);
extern "C" void art_quick_get32_static(void);
#endif

static void GetSet32Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                           mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  constexpr size_t num_values = 7;
  uint32_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              reinterpret_cast<uintptr_t>(&art_quick_set32_static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           reinterpret_cast<uintptr_t>(&art_quick_get32_static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


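// Instance-field variant: stores through art_quick_set32_instance, cross-checks the field with
// ArtField::GetInt, bumps the value via ArtField::SetInt, and reads it back through
// art_quick_get32_instance, so both stub directions are validated against the runtime accessors.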
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_set32_instance(void);
extern "C" void art_quick_get32_instance(void);
#endif

static void GetSet32Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  constexpr size_t num_values = 7;
  uint32_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              reinterpret_cast<uintptr_t>(&art_quick_set32_instance),
                              self,
                              referrer);

    int32_t res = f->Get()->GetInt(obj->Get());
    EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;

    res++;
    f->Get()->SetInt<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            reinterpret_cast<uintptr_t>(&art_quick_get32_instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int32_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


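// The object-field stubs need real references to store. set_and_check_static writes val through
// art_quick_set_obj_static and expects art_quick_get_obj_static to return the identical pointer.
// GetSetObjStatic runs the sequence nullptr -> String -> nullptr, presumably so the static field
// does not keep the test string live afterwards.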
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_set_obj_static(void);
extern "C" void art_quick_get_obj_static(void);

static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
                                 mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                            reinterpret_cast<size_t>(val),
                            0U,
                            reinterpret_cast<uintptr_t>(&art_quick_set_obj_static),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                                         0U, 0U,
                                         reinterpret_cast<uintptr_t>(&art_quick_get_obj_static),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
}
#endif

static void GetSetObjStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                            mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_static((*f)->GetDexFieldIndex(), str, self, referrer, test);

  set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);
#else
  LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


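// Same idea for instance fields: the reference written through the stub must come back unchanged
// from the getter stub, and additionally match what ArtField::GetObj sees on the target object.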
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_set_obj_instance(void);
extern "C" void art_quick_get_obj_instance(void);

static void set_and_check_instance(Handle<mirror::ArtField>* f, mirror::Object* trg,
                                   mirror::Object* val, Thread* self, mirror::ArtMethod* referrer,
                                   StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                            reinterpret_cast<size_t>(trg),
                            reinterpret_cast<size_t>(val),
                            reinterpret_cast<uintptr_t>(&art_quick_set_obj_instance),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                         reinterpret_cast<size_t>(trg),
                                         0U,
                                         reinterpret_cast<uintptr_t>(&art_quick_get_obj_instance),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;

  EXPECT_EQ(val, f->Get()->GetObj(trg));
}
#endif

static void GetSetObjInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                              Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_instance(f, obj->Get(), str, self, referrer, test);

  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
#else
  LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


// TODO: Complete these tests for 32-bit architectures.

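// The 64-bit helpers mirror their 32-bit counterparts but use Invoke3UWithReferrer for the
// setter, which passes a full 64-bit value; presumably that is also why they are only compiled
// for 64-bit targets here (see the TODO above).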
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
extern "C" void art_quick_set64_static(void);
extern "C" void art_quick_get64_static(void);
#endif

static void GetSet64Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                           mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
  constexpr size_t num_values = 8;
  uint64_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3UWithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                               values[i],
                               reinterpret_cast<uintptr_t>(&art_quick_set64_static),
                               self,
                               referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           reinterpret_cast<uintptr_t>(&art_quick_get64_static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


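// Instance variant of the 64-bit test; like GetSet32Instance, it interleaves stub accesses with
// ArtField::GetLong/SetLong to check both stub directions against the runtime accessors.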
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
extern "C" void art_quick_set64_instance(void);
extern "C" void art_quick_get64_instance(void);
#endif

static void GetSet64Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
  constexpr size_t num_values = 8;
  uint64_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              reinterpret_cast<uintptr_t>(&art_quick_set64_instance),
                              self,
                              referrer);

    int64_t res = f->Get()->GetLong(obj->Get());
    EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;

    res++;
    f->Get()->SetLong<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            reinterpret_cast<uintptr_t>(&art_quick_get64_instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int64_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

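// Driver for the field tests: allocates an instance of the AllFields test class via JNI, walks
// its static and instance field lists, and dispatches to the matching helper for the requested
// primitive type. AllFields is a dex test fixture assumed to declare at least one static and one
// instance field of each tested kind (int, long, and a non-array reference type); array-typed
// reference fields are skipped below.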
static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
  // garbage is created during ClassLinker::Init

  JNIEnv* env = Thread::Current()->GetJniEnv();
  jclass jc = env->FindClass("AllFields");
  CHECK(jc != nullptr);
  jobject o = env->AllocObject(jc);
  CHECK(o != nullptr);

  ScopedObjectAccess soa(self);
  StackHandleScope<5> hs(self);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
  Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
  // Need a method as a referrer.
  Handle<mirror::ArtMethod> m(hs.NewHandle(c->GetDirectMethod(0)));

  // Play with it...

  // Static fields.
  {
    Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetSFields()));
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      StackHandleScope<1> hs(self);
      Handle<mirror::ArtField> f(hs.NewHandle(fields->Get(i)));

      Primitive::Type type = f->GetTypeAsPrimitiveType();
      switch (type) {
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Static(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Static(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Don't try array.
          if (test_type == type && f->GetTypeDescriptor()[0] != '[') {
            GetSetObjStatic(&obj, &f, self, m.Get(), test);
          }
          break;

        default:
          break;  // Skip.
      }
    }
  }

  // Instance fields.
  {
    Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetIFields()));
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      StackHandleScope<1> hs(self);
      Handle<mirror::ArtField> f(hs.NewHandle(fields->Get(i)));

      Primitive::Type type = f->GetTypeAsPrimitiveType();
      switch (type) {
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Instance(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Instance(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Don't try array.
          if (test_type == type && f->GetTypeDescriptor()[0] != '[') {
            GetSetObjInstance(&obj, &f, self, m.Get(), test);
          }
          break;

        default:
          break;  // Skip.
      }
    }
  }

  // TODO: Deallocate things.
}


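// The three TEST_F wrappers below differ only in the primitive type they ask TestFields to
// exercise. Each loads the AllFields dex file and starts the runtime explicitly so that the
// class can be found and allocated through JNI.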
TEST_F(StubTest, Fields32) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimInt);
}

TEST_F(StubTest, FieldsObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimNot);
}

TEST_F(StubTest, Fields64) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimLong);
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_imt_conflict_trampoline(void);
#endif

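// Tests the IMT conflict trampoline: interface methods that hash to the same IMT slot must be
// dispatched through art_quick_imt_conflict_trampoline, with the interface method's dex index
// passed as the hidden argument. The test first sanity-checks that ArrayList's slot for
// List.contains really holds the conflict method, then calls contains() through the trampoline
// and expects false before and true after add().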
TEST_F(StubTest, IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod = env->GetMethodID(arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get mirror representation.
  Handle<mirror::ArtMethod> contains_amethod(hs.NewHandle(soa.DecodeMethod(contains_jmethod)));

  // Patch up ArrayList.contains.
  if (contains_amethod.Get()->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod.Get()->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        GetTlsPtr(self)->quick_entrypoints.pQuickToInterpreterBridge));
  }

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod = env->GetMethodID(list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get mirror representation.
  Handle<mirror::ArtMethod> inf_contains(hs.NewHandle(soa.DecodeMethod(inf_contains_jmethod)));

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Sanity check: ensure that there is a conflict for List.contains in ArrayList.

  mirror::Class* arraylist_class = soa.Decode<mirror::Class*>(arraylist_jclass);
  mirror::ArtMethod* m = arraylist_class->GetImTable()->Get(
      inf_contains->GetDexMethodIndex() % ClassLinker::kImtSize);

  if (!m->IsImtConflictMethod()) {
    LOG(WARNING) << "Test is meaningless, no IMT conflict in setup: " <<
        PrettyMethod(m, true);
    LOG(WARNING) << "Please update StubTest.IMT.";
    return;
  }

  // Create instances.

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object*>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(jobj)));

  // Invoke.

  size_t result =
      Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   reinterpret_cast<uintptr_t>(&art_quick_imt_conflict_trampoline),
                                   self, contains_amethod.Get(),
                                   static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));

  // Invoke again.

  result = Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                        reinterpret_cast<size_t>(obj.Get()),
                                        reinterpret_cast<uintptr_t>(&art_quick_imt_conflict_trampoline),
                                        self, contains_amethod.Get(),
                                        static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

#if defined(__arm__) || defined(__aarch64__)
extern "C" void art_quick_indexof(void);
#endif

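// Exhaustively checks art_quick_indexof against String::FastIndexOf for every (string, char,
// start) combination, including out-of-range start values one below and two above the maximum
// string length (hence the kMaxLen + 3 dimension of the expectation matrix).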
TEST_F(StubTest, StringIndexOf) {
#if defined(__arm__) || defined(__aarch64__)
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // TODO: Shared backing arrays.
  static constexpr size_t kStringCount = 7;
  const char* c_str[kStringCount] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
  static constexpr size_t kCharCount = 5;
  const char c_char[kCharCount] = { 'a', 'b', 'c', 'd', 'e' };

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
  }

  // Matrix of expectations. First component is the first parameter. We compute the expected
  // values with String::FastIndexOf and rely on it being correct.
  static constexpr size_t kMaxLen = 9;
  DCHECK_LE(strlen(c_str[kStringCount - 1]), kMaxLen) << "Please fix the indexof test.";

  // Last dimension: start, offset by 1.
  int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
      }
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        int32_t start = static_cast<int32_t>(z) - 1;

        // Test indexof for string x, char y, starting at 'start'.
        size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
                                reinterpret_cast<uintptr_t>(&art_quick_indexof), self);

        EXPECT_FALSE(self->IsExceptionPending());

        // The result is a 32-bit signed integer.
        union {
          size_t r;
          int32_t i;
        } conv;
        conv.r = result;

        EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
            c_char[y] << " @ " << start;
      }
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

}  // namespace art