blob: b0928f8cfacc881cb60df27bea6534f9eef933fd [file] [log] [blame]
Andreas Gampe525cde22014-04-22 15:44:50 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Ian Rogerse63db272014-07-15 15:36:11 -070017#include <cstdio>
18
Andreas Gampe525cde22014-04-22 15:44:50 -070019#include "common_runtime_test.h"
Andreas Gampe29b38412014-08-13 00:15:43 -070020#include "entrypoints/quick/quick_entrypoints_enum.h"
Andreas Gampe6e4e59c2014-05-05 20:11:02 -070021#include "mirror/art_field-inl.h"
Andreas Gampe51f76352014-05-21 08:28:48 -070022#include "mirror/art_method-inl.h"
23#include "mirror/class-inl.h"
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -070024#include "mirror/string-inl.h"
Ian Rogerse63db272014-07-15 15:36:11 -070025#include "scoped_thread_state_change.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070026
27namespace art {
28
29
// Test fixture that invokes quick-entrypoint assembly stubs directly via
// per-architecture inline-assembly trampolines. Each trampoline marshals up to
// three word-sized arguments (plus an optional "hidden" argument and a fake
// referrer method) into the registers the quick ABI expects, calls the stub,
// and returns the word-sized result.
class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods for every CalleeSaveType that does not
      // have one yet; stubs that throw rely on these frames being present.
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(), type);
        }
      }
    }
  }

  // Shrink the heap and force interpreter mode ("-Xint") for these tests.
  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  // Invoke a stub with three word-sized arguments and no referrer.
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // Invoke a stub with three word-sized arguments, pushing |referrer| where the
  // quick ABI expects the calling method. On arm64 this also checks that the
  // callee-save FP registers d8-d15 survive the call (result lands in
  // fp_result, checked below).
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             mirror::ArtMethod* referrer) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "subl $12, %%esp\n\t"       // Align stack.
        "pushl %[referrer]\n\t"     // Store referrer.
        "call *%%edi\n\t"           // Call the stub
        "addl $16, %%esp"           // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #20\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "add sp, sp, #20\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop nullptr and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"    // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "str %[self], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldr x18, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"


        "blr x3\n\t"              // Call the stub
        "mov x8, x0\n\t"          // Store result
        "add sp, sp, #16\n\t"     // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"          // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"    // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"     // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t"  // Store the FPR comparison result
        "mov %[result], x8\n\t"      // Store the call result

        "b 3f\n\t"                // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__) && defined(__clang__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"              // Call the stub
        "addq $16, %%rsp\n\t"          // Pop nullptr and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "m"(referrer)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbx", "rcx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    // On arm64 a non-zero fpr_result means d8-d15 were not preserved by the
    // stub; on other architectures fpr_result stays 0 and this always passes.
    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Like Invoke3WithReferrer, but also passes a |hidden| argument in the
  // architecture-specific "hidden argument" register (e.g. r12 on arm,
  // x17 on arm64 — used by imt-conflict-style stubs).
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, mirror::ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "movd %[hidden], %%xmm0\n\t"  // Hidden argument travels in xmm0 on x86.
        "subl $12, %%esp\n\t"       // Align stack.
        "pushl %[referrer]\n\t"     // Store referrer
        "call *%%edi\n\t"           // Call the stub
        "addl $16, %%esp"           // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer), [hidden]"m"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"    // Hidden argument goes to r12.
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop nullptr and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"    // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x18, x17, [sp, #32]\n\t"  // self -> x18, hidden -> x17.
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"              // Call the stub
        "mov x8, x0\n\t"          // Store result
        "add sp, sp, #16\n\t"     // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"          // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"    // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"     // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t"  // Store the FPR comparison result
        "mov %[result], x8\n\t"      // Store the call result

        "b 3f\n\t"                // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__) && defined(__clang__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rbx\n\t"              // Call the stub (code is in rbx; rax carries hidden)
        "addq $16, %%rsp\n\t"          // Pop nullptr and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "b"(code), [referrer] "c"(referrer), [hidden] "a"(hidden)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    // Non-zero only if the arm64 path detected clobbered d8-d15 (see above).
    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Method with 32b arg0, 64b arg1. On 64-bit targets the wide argument fits
  // in one register; on 32-bit targets it is split across arg1/arg2.
  size_t Invoke3UWithReferrer(size_t arg0, uint64_t arg1, uintptr_t code, Thread* self,
                              mirror::ArtMethod* referrer) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, 0U, code, self, referrer);
#else
    // Need to split up arguments.
    uint32_t lower = static_cast<uint32_t>(arg1 & 0xFFFFFFFF);
    uint32_t upper = static_cast<uint32_t>((arg1 >> 32) & 0xFFFFFFFF);

    return Invoke3WithReferrer(arg0, lower, upper, code, self, referrer);
#endif
  }

  // Read the address of |entrypoint|'s stub out of |self|'s entrypoint table
  // (offset differs between 32- and 64-bit pointer layouts).
  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
    int32_t offset;
#ifdef __LP64__
    offset = GetThreadOffset<8>(entrypoint).Int32Value();
#else
    offset = GetThreadOffset<4>(entrypoint).Int32Value();
#endif
    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
  }

 protected:
  // Result of the FPR-preservation check of the last Invoke3* call; 0 == OK.
  size_t fp_result;
};
546
547
Andreas Gampe525cde22014-04-22 15:44:50 -0700548TEST_F(StubTest, Memcpy) {
Ian Rogersc3ccc102014-06-25 11:52:14 -0700549#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700550 Thread* self = Thread::Current();
551
552 uint32_t orig[20];
553 uint32_t trg[20];
554 for (size_t i = 0; i < 20; ++i) {
555 orig[i] = i;
556 trg[i] = 0;
557 }
558
559 Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
Andreas Gampe29b38412014-08-13 00:15:43 -0700560 10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700561
562 EXPECT_EQ(orig[0], trg[0]);
563
564 for (size_t i = 1; i < 4; ++i) {
565 EXPECT_NE(orig[i], trg[i]);
566 }
567
568 for (size_t i = 4; i < 14; ++i) {
569 EXPECT_EQ(orig[i], trg[i]);
570 }
571
572 for (size_t i = 14; i < 20; ++i) {
573 EXPECT_NE(orig[i], trg[i]);
574 }
575
576 // TODO: Test overlapping?
577
578#else
579 LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
580 // Force-print to std::cout so it's also outside the logcat.
581 std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
582#endif
583}
584
Andreas Gampe525cde22014-04-22 15:44:50 -0700585TEST_F(StubTest, LockObject) {
Ian Rogersc3ccc102014-06-25 11:52:14 -0700586#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700587 static constexpr size_t kThinLockLoops = 100;
588
Andreas Gampe525cde22014-04-22 15:44:50 -0700589 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700590
591 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
592
Andreas Gampe525cde22014-04-22 15:44:50 -0700593 // Create an object
594 ScopedObjectAccess soa(self);
595 // garbage is created during ClassLinker::Init
596
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700597 StackHandleScope<2> hs(soa.Self());
598 Handle<mirror::String> obj(
599 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700600 LockWord lock = obj->GetLockWord(false);
601 LockWord::LockState old_state = lock.GetState();
602 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
603
Andreas Gampe29b38412014-08-13 00:15:43 -0700604 Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700605
606 LockWord lock_after = obj->GetLockWord(false);
607 LockWord::LockState new_state = lock_after.GetState();
608 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700609 EXPECT_EQ(lock_after.ThinLockCount(), 0U); // Thin lock starts count at zero
610
611 for (size_t i = 1; i < kThinLockLoops; ++i) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700612 Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700613
614 // Check we're at lock count i
615
616 LockWord l_inc = obj->GetLockWord(false);
617 LockWord::LockState l_inc_state = l_inc.GetState();
618 EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
619 EXPECT_EQ(l_inc.ThinLockCount(), i);
620 }
621
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700622 // Force a fat lock by running identity hashcode to fill up lock word.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700623 Handle<mirror::String> obj2(hs.NewHandle(
624 mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700625
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700626 obj2->IdentityHashCode();
627
Andreas Gampe29b38412014-08-13 00:15:43 -0700628 Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700629
630 LockWord lock_after2 = obj2->GetLockWord(false);
631 LockWord::LockState new_state2 = lock_after2.GetState();
632 EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
633 EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));
634
635 // Test done.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700636#else
637 LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
638 // Force-print to std::cout so it's also outside the logcat.
639 std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
640#endif
641}
642
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700643
// Tiny deterministic pseudo-random generator (LCG-style step) used to drive
// the lock/unlock stress test reproducibly. State is public by design so
// tests can seed and inspect it directly.
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  // Advance the state and return the new value.
  uint32_t next() {
    const uint32_t product = val_ * 48271;   // Wraps mod 2^32 by unsigned rules.
    val_ = product % 2147483647 + 13;
    return val_;
  }

  uint32_t val_;  // Current generator state.
};
655
656
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700657// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
658static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
Ian Rogersc3ccc102014-06-25 11:52:14 -0700659#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700660 static constexpr size_t kThinLockLoops = 100;
661
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700662 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700663
664 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
665 const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700666 // Create an object
667 ScopedObjectAccess soa(self);
668 // garbage is created during ClassLinker::Init
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700669 static constexpr size_t kNumberOfLocks = 10; // Number of objects = lock
670 StackHandleScope<kNumberOfLocks + 1> hs(self);
671 Handle<mirror::String> obj(
672 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700673 LockWord lock = obj->GetLockWord(false);
674 LockWord::LockState old_state = lock.GetState();
675 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
676
Andreas Gampe29b38412014-08-13 00:15:43 -0700677 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700678 // This should be an illegal monitor state.
679 EXPECT_TRUE(self->IsExceptionPending());
680 self->ClearException();
681
682 LockWord lock_after = obj->GetLockWord(false);
683 LockWord::LockState new_state = lock_after.GetState();
684 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);
Andreas Gampe525cde22014-04-22 15:44:50 -0700685
Andreas Gampe29b38412014-08-13 00:15:43 -0700686 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700687
688 LockWord lock_after2 = obj->GetLockWord(false);
689 LockWord::LockState new_state2 = lock_after2.GetState();
690 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);
691
Andreas Gampe29b38412014-08-13 00:15:43 -0700692 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700693
694 LockWord lock_after3 = obj->GetLockWord(false);
695 LockWord::LockState new_state3 = lock_after3.GetState();
696 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);
697
698 // Stress test:
699 // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
700 // each step.
701
702 RandGen r(0x1234);
703
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700704 constexpr size_t kIterations = 10000; // Number of iterations
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700705 constexpr size_t kMoveToFat = 1000; // Chance of 1:kMoveFat to make a lock fat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700706
707 size_t counts[kNumberOfLocks];
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700708 bool fat[kNumberOfLocks]; // Whether a lock should be thin or fat.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700709 Handle<mirror::String> objects[kNumberOfLocks];
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700710
711 // Initialize = allocate.
712 for (size_t i = 0; i < kNumberOfLocks; ++i) {
713 counts[i] = 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700714 fat[i] = false;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700715 objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700716 }
717
718 for (size_t i = 0; i < kIterations; ++i) {
719 // Select which lock to update.
720 size_t index = r.next() % kNumberOfLocks;
721
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700722 // Make lock fat?
723 if (!fat[index] && (r.next() % kMoveToFat == 0)) {
724 fat[index] = true;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700725 objects[index]->IdentityHashCode();
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700726
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700727 LockWord lock_iter = objects[index]->GetLockWord(false);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700728 LockWord::LockState iter_state = lock_iter.GetState();
729 if (counts[index] == 0) {
730 EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
731 } else {
732 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
733 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700734 } else {
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700735 bool lock; // Whether to lock or unlock in this step.
736 if (counts[index] == 0) {
737 lock = true;
738 } else if (counts[index] == kThinLockLoops) {
739 lock = false;
740 } else {
741 // Randomly.
742 lock = r.next() % 2 == 0;
743 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700744
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700745 if (lock) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700746 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
747 self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700748 counts[index]++;
749 } else {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700750 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700751 art_quick_unlock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700752 counts[index]--;
753 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700754
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700755 EXPECT_FALSE(self->IsExceptionPending());
756
757 // Check the new state.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700758 LockWord lock_iter = objects[index]->GetLockWord(true);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700759 LockWord::LockState iter_state = lock_iter.GetState();
760 if (fat[index]) {
761 // Abuse MonitorInfo.
762 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700763 MonitorInfo info(objects[index].Get());
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700764 EXPECT_EQ(counts[index], info.entry_count_) << index;
765 } else {
766 if (counts[index] > 0) {
767 EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
768 EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
769 } else {
770 EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
771 }
772 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700773 }
774 }
775
776 // Unlock the remaining count times and then check it's unlocked. Then deallocate.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700777 // Go reverse order to correctly handle Handles.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700778 for (size_t i = 0; i < kNumberOfLocks; ++i) {
779 size_t index = kNumberOfLocks - 1 - i;
780 size_t count = counts[index];
781 while (count > 0) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700782 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
783 self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700784 count--;
785 }
786
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700787 LockWord lock_after4 = objects[index]->GetLockWord(false);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700788 LockWord::LockState new_state4 = lock_after4.GetState();
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700789 EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
790 || LockWord::LockState::kFatLocked == new_state4);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700791 }
792
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700793 // Test done.
Andreas Gampe525cde22014-04-22 15:44:50 -0700794#else
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700795 LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
Andreas Gampe525cde22014-04-22 15:44:50 -0700796 // Force-print to std::cout so it's also outside the logcat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700797 std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe525cde22014-04-22 15:44:50 -0700798#endif
799}
800
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700801TEST_F(StubTest, UnlockObject) {
802 TestUnlockObject(this);
803}
Andreas Gampe525cde22014-04-22 15:44:50 -0700804
Ian Rogersc3ccc102014-06-25 11:52:14 -0700805#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700806extern "C" void art_quick_check_cast(void);
807#endif
808
809TEST_F(StubTest, CheckCast) {
Ian Rogersc3ccc102014-06-25 11:52:14 -0700810#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700811 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700812
813 const uintptr_t art_quick_check_cast = StubTest::GetEntrypoint(self, kQuickCheckCast);
814
Andreas Gampe525cde22014-04-22 15:44:50 -0700815 // Find some classes.
816 ScopedObjectAccess soa(self);
817 // garbage is created during ClassLinker::Init
818
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700819 StackHandleScope<2> hs(soa.Self());
820 Handle<mirror::Class> c(
821 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
822 Handle<mirror::Class> c2(
823 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700824
825 EXPECT_FALSE(self->IsExceptionPending());
826
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700827 Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700828 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700829
830 EXPECT_FALSE(self->IsExceptionPending());
831
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700832 Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700833 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700834
835 EXPECT_FALSE(self->IsExceptionPending());
836
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700837 Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700838 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700839
840 EXPECT_FALSE(self->IsExceptionPending());
841
842 // TODO: Make the following work. But that would require correct managed frames.
843
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700844 Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700845 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700846
847 EXPECT_TRUE(self->IsExceptionPending());
848 self->ClearException();
849
850#else
851 LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
852 // Force-print to std::cout so it's also outside the logcat.
853 std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
854#endif
855}
856
857
Andreas Gampe525cde22014-04-22 15:44:50 -0700858TEST_F(StubTest, APutObj) {
Hiroshi Yamauchid6881ae2014-04-28 17:21:48 -0700859 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
860
Ian Rogersc3ccc102014-06-25 11:52:14 -0700861#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700862 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700863
864 // Do not check non-checked ones, we'd need handlers and stuff...
865 const uintptr_t art_quick_aput_obj_with_null_and_bound_check =
866 StubTest::GetEntrypoint(self, kQuickAputObjectWithNullAndBoundCheck);
867
Andreas Gampe525cde22014-04-22 15:44:50 -0700868 // Create an object
869 ScopedObjectAccess soa(self);
870 // garbage is created during ClassLinker::Init
871
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700872 StackHandleScope<5> hs(soa.Self());
873 Handle<mirror::Class> c(
874 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
875 Handle<mirror::Class> ca(
876 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700877
878 // Build a string array of size 1
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700879 Handle<mirror::ObjectArray<mirror::Object>> array(
880 hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));
Andreas Gampe525cde22014-04-22 15:44:50 -0700881
882 // Build a string -> should be assignable
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700883 Handle<mirror::String> str_obj(
884 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700885
886 // Build a generic object -> should fail assigning
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700887 Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));
Andreas Gampe525cde22014-04-22 15:44:50 -0700888
889 // Play with it...
890
891 // 1) Success cases
Andreas Gampef4e910b2014-04-29 16:55:52 -0700892 // 1.1) Assign str_obj to array[0..3]
Andreas Gampe525cde22014-04-22 15:44:50 -0700893
894 EXPECT_FALSE(self->IsExceptionPending());
895
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700896 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700897 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700898
899 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700900 EXPECT_EQ(str_obj.Get(), array->Get(0));
Andreas Gampe525cde22014-04-22 15:44:50 -0700901
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700902 Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700903 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700904
905 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700906 EXPECT_EQ(str_obj.Get(), array->Get(1));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700907
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700908 Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700909 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700910
911 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700912 EXPECT_EQ(str_obj.Get(), array->Get(2));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700913
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700914 Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700915 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700916
917 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700918 EXPECT_EQ(str_obj.Get(), array->Get(3));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700919
920 // 1.2) Assign null to array[0..3]
Andreas Gampe525cde22014-04-22 15:44:50 -0700921
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700922 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700923 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700924
925 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampef4e910b2014-04-29 16:55:52 -0700926 EXPECT_EQ(nullptr, array->Get(0));
927
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700928 Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700929 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700930
931 EXPECT_FALSE(self->IsExceptionPending());
932 EXPECT_EQ(nullptr, array->Get(1));
933
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700934 Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700935 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700936
937 EXPECT_FALSE(self->IsExceptionPending());
938 EXPECT_EQ(nullptr, array->Get(2));
939
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700940 Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700941 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700942
943 EXPECT_FALSE(self->IsExceptionPending());
944 EXPECT_EQ(nullptr, array->Get(3));
Andreas Gampe525cde22014-04-22 15:44:50 -0700945
946 // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.
947
948 // 2) Failure cases (str into str[])
949 // 2.1) Array = null
950 // TODO: Throwing NPE needs actual DEX code
951
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700952// Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe525cde22014-04-22 15:44:50 -0700953// reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
954//
955// EXPECT_TRUE(self->IsExceptionPending());
956// self->ClearException();
957
958 // 2.2) Index < 0
959
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700960 Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
961 reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700962 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700963
964 EXPECT_TRUE(self->IsExceptionPending());
965 self->ClearException();
966
967 // 2.3) Index > 0
968
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700969 Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700970 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700971
972 EXPECT_TRUE(self->IsExceptionPending());
973 self->ClearException();
974
975 // 3) Failure cases (obj into str[])
976
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700977 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700978 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700979
980 EXPECT_TRUE(self->IsExceptionPending());
981 self->ClearException();
982
983 // Tests done.
984#else
985 LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
986 // Force-print to std::cout so it's also outside the logcat.
987 std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
988#endif
989}
990
Andreas Gampe00c1e6d2014-04-25 15:47:13 -0700991TEST_F(StubTest, AllocObject) {
992 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
993
Ian Rogersc3ccc102014-06-25 11:52:14 -0700994#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe00c1e6d2014-04-25 15:47:13 -0700995 // TODO: Check the "Unresolved" allocation stubs
996
997 Thread* self = Thread::Current();
998 // Create an object
999 ScopedObjectAccess soa(self);
1000 // garbage is created during ClassLinker::Init
1001
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001002 StackHandleScope<2> hs(soa.Self());
1003 Handle<mirror::Class> c(
1004 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001005
1006 // Play with it...
1007
1008 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001009 {
1010 // Use an arbitrary method from c to use as referrer
1011 size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()), // type_idx
1012 reinterpret_cast<size_t>(c->GetVirtualMethod(0)), // arbitrary
1013 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001014 StubTest::GetEntrypoint(self, kQuickAllocObject),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001015 self);
1016
1017 EXPECT_FALSE(self->IsExceptionPending());
1018 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1019 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001020 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001021 VerifyObject(obj);
1022 }
1023
1024 {
1025 // We can use nullptr in the second argument as we do not need a method here (not used in
1026 // resolved/initialized cases)
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001027 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001028 StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001029 self);
1030
1031 EXPECT_FALSE(self->IsExceptionPending());
1032 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1033 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001034 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001035 VerifyObject(obj);
1036 }
1037
1038 {
1039 // We can use nullptr in the second argument as we do not need a method here (not used in
1040 // resolved/initialized cases)
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001041 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001042 StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001043 self);
1044
1045 EXPECT_FALSE(self->IsExceptionPending());
1046 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1047 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001048 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001049 VerifyObject(obj);
1050 }
1051
1052 // Failure tests.
1053
1054 // Out-of-memory.
1055 {
1056 Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);
1057
1058 // Array helps to fill memory faster.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001059 Handle<mirror::Class> ca(
1060 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
1061
1062 // Use arbitrary large amount for now.
1063 static const size_t kMaxHandles = 1000000;
Ian Rogers700a4022014-05-19 16:49:03 -07001064 std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001065
1066 std::vector<Handle<mirror::Object>> handles;
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001067 // Start allocating with 128K
1068 size_t length = 128 * KB / 4;
1069 while (length > 10) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001070 Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
1071 mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
1072 if (self->IsExceptionPending() || h.Get() == nullptr) {
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001073 self->ClearException();
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001074
1075 // Try a smaller length
1076 length = length / 8;
1077 // Use at most half the reported free space.
1078 size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
1079 if (length * 8 > mem) {
1080 length = mem / 8;
1081 }
1082 } else {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001083 handles.push_back(h);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001084 }
1085 }
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001086 LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001087
1088 // Allocate simple objects till it fails.
1089 while (!self->IsExceptionPending()) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001090 Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
1091 if (!self->IsExceptionPending() && h.Get() != nullptr) {
1092 handles.push_back(h);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001093 }
1094 }
1095 self->ClearException();
1096
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001097 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001098 StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001099 self);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001100 EXPECT_TRUE(self->IsExceptionPending());
1101 self->ClearException();
1102 EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001103 }
1104
1105 // Tests done.
1106#else
1107 LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
1108 // Force-print to std::cout so it's also outside the logcat.
1109 std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
1110#endif
1111}
1112
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001113TEST_F(StubTest, AllocObjectArray) {
1114 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1115
Ian Rogersc3ccc102014-06-25 11:52:14 -07001116#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001117 // TODO: Check the "Unresolved" allocation stubs
1118
1119 Thread* self = Thread::Current();
1120 // Create an object
1121 ScopedObjectAccess soa(self);
1122 // garbage is created during ClassLinker::Init
1123
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001124 StackHandleScope<2> hs(self);
1125 Handle<mirror::Class> c(
1126 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001127
1128 // Needed to have a linked method.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001129 Handle<mirror::Class> c_obj(
1130 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001131
1132 // Play with it...
1133
1134 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001135
1136 // For some reason this does not work, as the type_idx is artificial and outside what the
1137 // resolved types of c_obj allow...
1138
Ian Rogerscf7f1912014-10-22 22:06:39 -07001139 if ((false)) {
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001140 // Use an arbitrary method from c to use as referrer
1141 size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()), // type_idx
1142 reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0)), // arbitrary
1143 10U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001144 StubTest::GetEntrypoint(self, kQuickAllocArray),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001145 self);
1146
1147 EXPECT_FALSE(self->IsExceptionPending());
1148 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1149 mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001150 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001151 VerifyObject(obj);
1152 EXPECT_EQ(obj->GetLength(), 10);
1153 }
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001154
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001155 {
1156 // We can use nullptr in the second argument as we do not need a method here (not used in
1157 // resolved/initialized cases)
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001158 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 10U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001159 StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001160 self);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001161 EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001162 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1163 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
1164 EXPECT_TRUE(obj->IsArrayInstance());
1165 EXPECT_TRUE(obj->IsObjectArray());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001166 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001167 VerifyObject(obj);
1168 mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
1169 EXPECT_EQ(array->GetLength(), 10);
1170 }
1171
1172 // Failure tests.
1173
1174 // Out-of-memory.
1175 {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001176 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001177 GB, // that should fail...
Andreas Gampe29b38412014-08-13 00:15:43 -07001178 StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001179 self);
1180
1181 EXPECT_TRUE(self->IsExceptionPending());
1182 self->ClearException();
1183 EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
1184 }
1185
1186 // Tests done.
1187#else
1188 LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
1189 // Force-print to std::cout so it's also outside the logcat.
1190 std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
1191#endif
1192}
1193
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001194
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001195TEST_F(StubTest, StringCompareTo) {
1196 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1197
Ian Rogersc3ccc102014-06-25 11:52:14 -07001198#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001199 // TODO: Check the "Unresolved" allocation stubs
1200
1201 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -07001202
1203 const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);
1204
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001205 ScopedObjectAccess soa(self);
1206 // garbage is created during ClassLinker::Init
1207
1208 // Create some strings
1209 // Use array so we can index into it and use a matrix for expected results
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001210 // Setup: The first half is standard. The second half uses a non-zero offset.
1211 // TODO: Shared backing arrays.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001212 const char* c[] = { "", "", "a", "aa", "ab",
Serban Constantinescu86797a72014-06-19 16:17:56 +01001213 "aacaacaacaacaacaac", // This one's under the default limit to go to __memcmp16.
1214 "aacaacaacaacaacaacaacaacaacaacaacaac", // This one's over.
1215 "aacaacaacaacaacaacaacaacaacaacaacaaca" }; // As is this one. We need a separate one to
1216 // defeat object-equal optimizations.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001217 static constexpr size_t kBaseStringCount = arraysize(c);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001218 static constexpr size_t kStringCount = 2 * kBaseStringCount;
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001219
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001220 StackHandleScope<kStringCount> hs(self);
1221 Handle<mirror::String> s[kStringCount];
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001222
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001223 for (size_t i = 0; i < kBaseStringCount; ++i) {
1224 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001225 }
1226
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001227 RandGen r(0x1234);
1228
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001229 for (size_t i = kBaseStringCount; i < kStringCount; ++i) {
1230 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i - kBaseStringCount]));
1231 int32_t length = s[i]->GetLength();
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001232 if (length > 1) {
1233 // Set a random offset and length.
1234 int32_t new_offset = 1 + (r.next() % (length - 1));
1235 int32_t rest = length - new_offset - 1;
1236 int32_t new_length = 1 + (rest > 0 ? r.next() % rest : 0);
1237
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001238 s[i]->SetField32<false>(mirror::String::CountOffset(), new_length);
1239 s[i]->SetField32<false>(mirror::String::OffsetOffset(), new_offset);
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001240 }
1241 }
1242
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001243 // TODO: wide characters
1244
1245 // Matrix of expectations. First component is first parameter. Note we only check against the
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001246 // sign, not the value. As we are testing random offsets, we need to compute this and need to
1247 // rely on String::CompareTo being correct.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001248 int32_t expected[kStringCount][kStringCount];
1249 for (size_t x = 0; x < kStringCount; ++x) {
1250 for (size_t y = 0; y < kStringCount; ++y) {
1251 expected[x][y] = s[x]->CompareTo(s[y].Get());
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001252 }
1253 }
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001254
1255 // Play with it...
1256
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001257 for (size_t x = 0; x < kStringCount; ++x) {
1258 for (size_t y = 0; y < kStringCount; ++y) {
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001259 // Test string_compareto x y
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001260 size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
1261 reinterpret_cast<size_t>(s[y].Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001262 art_quick_string_compareto, self);
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001263
1264 EXPECT_FALSE(self->IsExceptionPending());
1265
1266 // The result is a 32b signed integer
1267 union {
1268 size_t r;
1269 int32_t i;
1270 } conv;
1271 conv.r = result;
1272 int32_t e = expected[x][y];
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001273 EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1274 conv.r;
1275 EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1276 conv.r;
1277 EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1278 conv.r;
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001279 }
1280 }
1281
Andreas Gampe7177d7c2014-05-02 12:10:02 -07001282 // TODO: Deallocate things.
1283
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001284 // Tests done.
1285#else
1286 LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
1287 // Force-print to std::cout so it's also outside the logcat.
1288 std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
1289 std::endl;
1290#endif
1291}
1292
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001293
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001294static void GetSetBooleanStatic(Handle<mirror::ArtField>* f, Thread* self,
1295 mirror::ArtMethod* referrer, StubTest* test)
Fred Shih37f05ef2014-07-16 18:38:08 -07001296 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1297#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
1298 constexpr size_t num_values = 5;
1299 uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };
1300
1301 for (size_t i = 0; i < num_values; ++i) {
1302 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1303 static_cast<size_t>(values[i]),
1304 0U,
1305 StubTest::GetEntrypoint(self, kQuickSet8Static),
1306 self,
1307 referrer);
1308
1309 size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1310 0U, 0U,
1311 StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
1312 self,
1313 referrer);
1314 // Boolean currently stores bools as uint8_t, be more zealous about asserting correct writes/gets.
1315 EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
1316 }
1317#else
1318 LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
1319 // Force-print to std::cout so it's also outside the logcat.
1320 std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1321#endif
1322}
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001323static void GetSetByteStatic(Handle<mirror::ArtField>* f, Thread* self,
1324 mirror::ArtMethod* referrer, StubTest* test)
Fred Shih37f05ef2014-07-16 18:38:08 -07001325 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1326#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001327 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001328
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001329 for (size_t i = 0; i < arraysize(values); ++i) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001330 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1331 static_cast<size_t>(values[i]),
1332 0U,
1333 StubTest::GetEntrypoint(self, kQuickSet8Static),
1334 self,
1335 referrer);
1336
1337 size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1338 0U, 0U,
1339 StubTest::GetEntrypoint(self, kQuickGetByteStatic),
1340 self,
1341 referrer);
1342 EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
1343 }
1344#else
1345 LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
1346 // Force-print to std::cout so it's also outside the logcat.
1347 std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1348#endif
1349}
1350
1351
Fred Shih37f05ef2014-07-16 18:38:08 -07001352static void GetSetBooleanInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001353 Thread* self, mirror::ArtMethod* referrer, StubTest* test)
Fred Shih37f05ef2014-07-16 18:38:08 -07001354 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1355#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001356 uint8_t values[] = { 0, true, 2, 128, 0xFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001357
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001358 for (size_t i = 0; i < arraysize(values); ++i) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001359 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1360 reinterpret_cast<size_t>(obj->Get()),
1361 static_cast<size_t>(values[i]),
1362 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1363 self,
1364 referrer);
1365
1366 uint8_t res = f->Get()->GetBoolean(obj->Get());
1367 EXPECT_EQ(values[i], res) << "Iteration " << i;
1368
1369 f->Get()->SetBoolean<false>(obj->Get(), res);
1370
1371 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1372 reinterpret_cast<size_t>(obj->Get()),
1373 0U,
1374 StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
1375 self,
1376 referrer);
1377 EXPECT_EQ(res, static_cast<uint8_t>(res2));
1378 }
1379#else
1380 LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
1381 // Force-print to std::cout so it's also outside the logcat.
1382 std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1383#endif
1384}
1385static void GetSetByteInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
1386 Thread* self, mirror::ArtMethod* referrer, StubTest* test)
1387 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1388#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001389 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001390
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001391 for (size_t i = 0; i < arraysize(values); ++i) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001392 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1393 reinterpret_cast<size_t>(obj->Get()),
1394 static_cast<size_t>(values[i]),
1395 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1396 self,
1397 referrer);
1398
1399 int8_t res = f->Get()->GetByte(obj->Get());
1400 EXPECT_EQ(res, values[i]) << "Iteration " << i;
1401 f->Get()->SetByte<false>(obj->Get(), ++res);
1402
1403 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1404 reinterpret_cast<size_t>(obj->Get()),
1405 0U,
1406 StubTest::GetEntrypoint(self, kQuickGetByteInstance),
1407 self,
1408 referrer);
1409 EXPECT_EQ(res, static_cast<int8_t>(res2));
1410 }
1411#else
1412 LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
1413 // Force-print to std::cout so it's also outside the logcat.
1414 std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1415#endif
1416}
1417
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001418static void GetSetCharStatic(Handle<mirror::ArtField>* f, Thread* self, mirror::ArtMethod* referrer,
1419 StubTest* test)
Fred Shih37f05ef2014-07-16 18:38:08 -07001420 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1421#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001422 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001423
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001424 for (size_t i = 0; i < arraysize(values); ++i) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001425 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1426 static_cast<size_t>(values[i]),
1427 0U,
1428 StubTest::GetEntrypoint(self, kQuickSet16Static),
1429 self,
1430 referrer);
1431
1432 size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1433 0U, 0U,
1434 StubTest::GetEntrypoint(self, kQuickGetCharStatic),
1435 self,
1436 referrer);
1437
1438 EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
1439 }
1440#else
1441 LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
1442 // Force-print to std::cout so it's also outside the logcat.
1443 std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1444#endif
1445}
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001446static void GetSetShortStatic(Handle<mirror::ArtField>* f, Thread* self,
1447 mirror::ArtMethod* referrer, StubTest* test)
Fred Shih37f05ef2014-07-16 18:38:08 -07001448 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1449#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001450 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001451
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001452 for (size_t i = 0; i < arraysize(values); ++i) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001453 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1454 static_cast<size_t>(values[i]),
1455 0U,
1456 StubTest::GetEntrypoint(self, kQuickSet16Static),
1457 self,
1458 referrer);
1459
1460 size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1461 0U, 0U,
1462 StubTest::GetEntrypoint(self, kQuickGetShortStatic),
1463 self,
1464 referrer);
1465
1466 EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
1467 }
1468#else
1469 LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
1470 // Force-print to std::cout so it's also outside the logcat.
1471 std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1472#endif
1473}
1474
Fred Shih37f05ef2014-07-16 18:38:08 -07001475static void GetSetCharInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
1476 Thread* self, mirror::ArtMethod* referrer, StubTest* test)
1477 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1478#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001479 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001480
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001481 for (size_t i = 0; i < arraysize(values); ++i) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001482 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1483 reinterpret_cast<size_t>(obj->Get()),
1484 static_cast<size_t>(values[i]),
1485 StubTest::GetEntrypoint(self, kQuickSet16Instance),
1486 self,
1487 referrer);
1488
1489 uint16_t res = f->Get()->GetChar(obj->Get());
1490 EXPECT_EQ(res, values[i]) << "Iteration " << i;
1491 f->Get()->SetChar<false>(obj->Get(), ++res);
1492
1493 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1494 reinterpret_cast<size_t>(obj->Get()),
1495 0U,
1496 StubTest::GetEntrypoint(self, kQuickGetCharInstance),
1497 self,
1498 referrer);
1499 EXPECT_EQ(res, static_cast<uint16_t>(res2));
1500 }
1501#else
1502 LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
1503 // Force-print to std::cout so it's also outside the logcat.
1504 std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1505#endif
1506}
1507static void GetSetShortInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
1508 Thread* self, mirror::ArtMethod* referrer, StubTest* test)
1509 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1510#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001511 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001512
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001513 for (size_t i = 0; i < arraysize(values); ++i) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001514 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1515 reinterpret_cast<size_t>(obj->Get()),
1516 static_cast<size_t>(values[i]),
1517 StubTest::GetEntrypoint(self, kQuickSet16Instance),
1518 self,
1519 referrer);
1520
1521 int16_t res = f->Get()->GetShort(obj->Get());
1522 EXPECT_EQ(res, values[i]) << "Iteration " << i;
1523 f->Get()->SetShort<false>(obj->Get(), ++res);
1524
1525 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1526 reinterpret_cast<size_t>(obj->Get()),
1527 0U,
1528 StubTest::GetEntrypoint(self, kQuickGetShortInstance),
1529 self,
1530 referrer);
1531 EXPECT_EQ(res, static_cast<int16_t>(res2));
1532 }
1533#else
1534 LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
1535 // Force-print to std::cout so it's also outside the logcat.
1536 std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1537#endif
1538}
1539
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001540static void GetSet32Static(Handle<mirror::ArtField>* f, Thread* self, mirror::ArtMethod* referrer,
1541 StubTest* test)
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001542 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogersc3ccc102014-06-25 11:52:14 -07001543#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001544 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001545
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001546 for (size_t i = 0; i < arraysize(values); ++i) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001547 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1548 static_cast<size_t>(values[i]),
1549 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001550 StubTest::GetEntrypoint(self, kQuickSet32Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001551 self,
1552 referrer);
1553
1554 size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1555 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001556 StubTest::GetEntrypoint(self, kQuickGet32Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001557 self,
1558 referrer);
1559
1560 EXPECT_EQ(res, values[i]) << "Iteration " << i;
1561 }
1562#else
1563 LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
1564 // Force-print to std::cout so it's also outside the logcat.
1565 std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
1566#endif
1567}
1568
1569
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001570static void GetSet32Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001571 Thread* self, mirror::ArtMethod* referrer, StubTest* test)
1572 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogersc3ccc102014-06-25 11:52:14 -07001573#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001574 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001575
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001576 for (size_t i = 0; i < arraysize(values); ++i) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001577 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001578 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001579 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001580 StubTest::GetEntrypoint(self, kQuickSet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001581 self,
1582 referrer);
1583
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001584 int32_t res = f->Get()->GetInt(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001585 EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;
1586
1587 res++;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001588 f->Get()->SetInt<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001589
1590 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001591 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001592 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001593 StubTest::GetEntrypoint(self, kQuickGet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001594 self,
1595 referrer);
1596 EXPECT_EQ(res, static_cast<int32_t>(res2));
1597 }
1598#else
1599 LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
1600 // Force-print to std::cout so it's also outside the logcat.
1601 std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1602#endif
1603}
1604
1605
Ian Rogersc3ccc102014-06-25 11:52:14 -07001606#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001607
1608static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
1609 mirror::ArtMethod* referrer, StubTest* test)
1610 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1611 test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1612 reinterpret_cast<size_t>(val),
1613 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001614 StubTest::GetEntrypoint(self, kQuickSetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001615 self,
1616 referrer);
1617
1618 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1619 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001620 StubTest::GetEntrypoint(self, kQuickGetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001621 self,
1622 referrer);
1623
1624 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1625}
1626#endif
1627
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001628static void GetSetObjStatic(Handle<mirror::ArtField>* f, Thread* self, mirror::ArtMethod* referrer,
1629 StubTest* test)
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001630 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogersc3ccc102014-06-25 11:52:14 -07001631#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001632 set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);
1633
1634 // Allocate a string object for simplicity.
1635 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
1636 set_and_check_static((*f)->GetDexFieldIndex(), str, self, referrer, test);
1637
1638 set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);
1639#else
1640 LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
1641 // Force-print to std::cout so it's also outside the logcat.
1642 std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
1643#endif
1644}
1645
1646
Ian Rogersc3ccc102014-06-25 11:52:14 -07001647#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001648static void set_and_check_instance(Handle<mirror::ArtField>* f, mirror::Object* trg,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001649 mirror::Object* val, Thread* self, mirror::ArtMethod* referrer,
1650 StubTest* test)
1651 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1652 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1653 reinterpret_cast<size_t>(trg),
1654 reinterpret_cast<size_t>(val),
Andreas Gampe29b38412014-08-13 00:15:43 -07001655 StubTest::GetEntrypoint(self, kQuickSetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001656 self,
1657 referrer);
1658
1659 size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1660 reinterpret_cast<size_t>(trg),
1661 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001662 StubTest::GetEntrypoint(self, kQuickGetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001663 self,
1664 referrer);
1665
1666 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1667
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001668 EXPECT_EQ(val, f->Get()->GetObj(trg));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001669}
1670#endif
1671
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001672static void GetSetObjInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001673 Thread* self, mirror::ArtMethod* referrer, StubTest* test)
1674 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogersc3ccc102014-06-25 11:52:14 -07001675#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001676 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001677
1678 // Allocate a string object for simplicity.
1679 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001680 set_and_check_instance(f, obj->Get(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001681
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001682 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001683#else
1684 LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
1685 // Force-print to std::cout so it's also outside the logcat.
1686 std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
1687#endif
1688}
1689
1690
1691// TODO: Complete these tests for 32b architectures.
1692
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001693static void GetSet64Static(Handle<mirror::ArtField>* f, Thread* self, mirror::ArtMethod* referrer,
1694 StubTest* test)
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001695 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogersc3ccc102014-06-25 11:52:14 -07001696#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001697 uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001698
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001699 for (size_t i = 0; i < arraysize(values); ++i) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001700 test->Invoke3UWithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1701 values[i],
Andreas Gampe29b38412014-08-13 00:15:43 -07001702 StubTest::GetEntrypoint(self, kQuickSet64Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001703 self,
1704 referrer);
1705
1706 size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1707 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001708 StubTest::GetEntrypoint(self, kQuickGet64Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001709 self,
1710 referrer);
1711
1712 EXPECT_EQ(res, values[i]) << "Iteration " << i;
1713 }
1714#else
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001715 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001716 LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
1717 // Force-print to std::cout so it's also outside the logcat.
1718 std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
1719#endif
1720}
1721
1722
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001723static void GetSet64Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001724 Thread* self, mirror::ArtMethod* referrer, StubTest* test)
1725 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogersc3ccc102014-06-25 11:52:14 -07001726#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001727 uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001728
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001729 for (size_t i = 0; i < arraysize(values); ++i) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001730 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001731 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001732 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001733 StubTest::GetEntrypoint(self, kQuickSet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001734 self,
1735 referrer);
1736
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001737 int64_t res = f->Get()->GetLong(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001738 EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;
1739
1740 res++;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001741 f->Get()->SetLong<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001742
1743 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001744 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001745 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001746 StubTest::GetEntrypoint(self, kQuickGet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001747 self,
1748 referrer);
1749 EXPECT_EQ(res, static_cast<int64_t>(res2));
1750 }
1751#else
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001752 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001753 LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
1754 // Force-print to std::cout so it's also outside the logcat.
1755 std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1756#endif
1757}
1758
1759static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
1760 // garbage is created during ClassLinker::Init
1761
1762 JNIEnv* env = Thread::Current()->GetJniEnv();
1763 jclass jc = env->FindClass("AllFields");
1764 CHECK(jc != NULL);
1765 jobject o = env->AllocObject(jc);
1766 CHECK(o != NULL);
1767
1768 ScopedObjectAccess soa(self);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001769 StackHandleScope<5> hs(self);
1770 Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
1771 Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001772 // Need a method as a referrer
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001773 Handle<mirror::ArtMethod> m(hs.NewHandle(c->GetDirectMethod(0)));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001774
1775 // Play with it...
1776
1777 // Static fields.
1778 {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001779 Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetSFields()));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001780 int32_t num_fields = fields->GetLength();
1781 for (int32_t i = 0; i < num_fields; ++i) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001782 StackHandleScope<1> hs(self);
1783 Handle<mirror::ArtField> f(hs.NewHandle(fields->Get(i)));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001784
Mathieu Chartier61c5ebc2014-06-05 17:42:53 -07001785 Primitive::Type type = f->GetTypeAsPrimitiveType();
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001786 switch (type) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001787 case Primitive::Type::kPrimBoolean:
1788 if (test_type == type) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001789 GetSetBooleanStatic(&f, self, m.Get(), test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001790 }
1791 break;
1792 case Primitive::Type::kPrimByte:
1793 if (test_type == type) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001794 GetSetByteStatic(&f, self, m.Get(), test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001795 }
1796 break;
1797 case Primitive::Type::kPrimChar:
1798 if (test_type == type) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001799 GetSetCharStatic(&f, self, m.Get(), test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001800 }
1801 break;
1802 case Primitive::Type::kPrimShort:
1803 if (test_type == type) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001804 GetSetShortStatic(&f, self, m.Get(), test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001805 }
1806 break;
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001807 case Primitive::Type::kPrimInt:
1808 if (test_type == type) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001809 GetSet32Static(&f, self, m.Get(), test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001810 }
1811 break;
1812
1813 case Primitive::Type::kPrimLong:
1814 if (test_type == type) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001815 GetSet64Static(&f, self, m.Get(), test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001816 }
1817 break;
1818
1819 case Primitive::Type::kPrimNot:
1820 // Don't try array.
Mathieu Chartier61c5ebc2014-06-05 17:42:53 -07001821 if (test_type == type && f->GetTypeDescriptor()[0] != '[') {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001822 GetSetObjStatic(&f, self, m.Get(), test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001823 }
1824 break;
1825
1826 default:
1827 break; // Skip.
1828 }
1829 }
1830 }
1831
1832 // Instance fields.
1833 {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001834 Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetIFields()));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001835 int32_t num_fields = fields->GetLength();
1836 for (int32_t i = 0; i < num_fields; ++i) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001837 StackHandleScope<1> hs(self);
1838 Handle<mirror::ArtField> f(hs.NewHandle(fields->Get(i)));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001839
Mathieu Chartier61c5ebc2014-06-05 17:42:53 -07001840 Primitive::Type type = f->GetTypeAsPrimitiveType();
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001841 switch (type) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001842 case Primitive::Type::kPrimBoolean:
1843 if (test_type == type) {
1844 GetSetBooleanInstance(&obj, &f, self, m.Get(), test);
1845 }
1846 break;
1847 case Primitive::Type::kPrimByte:
1848 if (test_type == type) {
1849 GetSetByteInstance(&obj, &f, self, m.Get(), test);
1850 }
1851 break;
1852 case Primitive::Type::kPrimChar:
1853 if (test_type == type) {
1854 GetSetCharInstance(&obj, &f, self, m.Get(), test);
1855 }
1856 break;
1857 case Primitive::Type::kPrimShort:
1858 if (test_type == type) {
1859 GetSetShortInstance(&obj, &f, self, m.Get(), test);
1860 }
1861 break;
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001862 case Primitive::Type::kPrimInt:
1863 if (test_type == type) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001864 GetSet32Instance(&obj, &f, self, m.Get(), test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001865 }
1866 break;
1867
1868 case Primitive::Type::kPrimLong:
1869 if (test_type == type) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001870 GetSet64Instance(&obj, &f, self, m.Get(), test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001871 }
1872 break;
1873
1874 case Primitive::Type::kPrimNot:
1875 // Don't try array.
Mathieu Chartier61c5ebc2014-06-05 17:42:53 -07001876 if (test_type == type && f->GetTypeDescriptor()[0] != '[') {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001877 GetSetObjInstance(&obj, &f, self, m.Get(), test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001878 }
1879 break;
1880
1881 default:
1882 break; // Skip.
1883 }
1884 }
1885 }
1886
1887 // TODO: Deallocate things.
1888}
1889
// Exercises the 8-bit field stubs: boolean and byte, static and instance (see TestFields).
TEST_F(StubTest, Fields8) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  // The stubs are invoked while runnable, with the AllFields dex loaded and the runtime started.
  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimBoolean);
  TestFields(self, this, Primitive::Type::kPrimByte);
}
1903
// Exercises the 16-bit field stubs: char and short, static and instance (see TestFields).
TEST_F(StubTest, Fields16) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  // The stubs are invoked while runnable, with the AllFields dex loaded and the runtime started.
  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimChar);
  TestFields(self, this, Primitive::Type::kPrimShort);
}
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001917
// Exercises the 32-bit (int) field stubs, static and instance (see TestFields).
TEST_F(StubTest, Fields32) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  // The stubs are invoked while runnable, with the AllFields dex loaded and the runtime started.
  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimInt);
}
1930
1931TEST_F(StubTest, FieldsObj) {
1932 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1933
1934 Thread* self = Thread::Current();
1935
1936 self->TransitionFromSuspendedToRunnable();
1937 LoadDex("AllFields");
1938 bool started = runtime_->Start();
1939 CHECK(started);
1940
1941 TestFields(self, this, Primitive::Type::kPrimNot);
1942}
1943
1944TEST_F(StubTest, Fields64) {
1945 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1946
1947 Thread* self = Thread::Current();
1948
1949 self->TransitionFromSuspendedToRunnable();
1950 LoadDex("AllFields");
1951 bool started = runtime_->Start();
1952 CHECK(started);
1953
1954 TestFields(self, this, Primitive::Type::kPrimLong);
1955}
1956
Andreas Gampe51f76352014-05-21 08:28:48 -07001957TEST_F(StubTest, IMT) {
Ian Rogersc3ccc102014-06-25 11:52:14 -07001958#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe51f76352014-05-21 08:28:48 -07001959 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1960
1961 Thread* self = Thread::Current();
1962
1963 ScopedObjectAccess soa(self);
1964 StackHandleScope<7> hs(self);
1965
1966 JNIEnv* env = Thread::Current()->GetJniEnv();
1967
1968 // ArrayList
1969
1970 // Load ArrayList and used methods (JNI).
1971 jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
1972 ASSERT_NE(nullptr, arraylist_jclass);
1973 jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
1974 ASSERT_NE(nullptr, arraylist_constructor);
1975 jmethodID contains_jmethod = env->GetMethodID(arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
1976 ASSERT_NE(nullptr, contains_jmethod);
1977 jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
1978 ASSERT_NE(nullptr, add_jmethod);
1979
1980 // Get mirror representation.
1981 Handle<mirror::ArtMethod> contains_amethod(hs.NewHandle(soa.DecodeMethod(contains_jmethod)));
1982
1983 // Patch up ArrayList.contains.
1984 if (contains_amethod.Get()->GetEntryPointFromQuickCompiledCode() == nullptr) {
1985 contains_amethod.Get()->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
Andreas Gampe29b38412014-08-13 00:15:43 -07001986 StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
Andreas Gampe51f76352014-05-21 08:28:48 -07001987 }
1988
1989 // List
1990
1991 // Load List and used methods (JNI).
1992 jclass list_jclass = env->FindClass("java/util/List");
1993 ASSERT_NE(nullptr, list_jclass);
1994 jmethodID inf_contains_jmethod = env->GetMethodID(list_jclass, "contains", "(Ljava/lang/Object;)Z");
1995 ASSERT_NE(nullptr, inf_contains_jmethod);
1996
1997 // Get mirror representation.
1998 Handle<mirror::ArtMethod> inf_contains(hs.NewHandle(soa.DecodeMethod(inf_contains_jmethod)));
1999
2000 // Object
2001
2002 jclass obj_jclass = env->FindClass("java/lang/Object");
2003 ASSERT_NE(nullptr, obj_jclass);
2004 jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
2005 ASSERT_NE(nullptr, obj_constructor);
2006
Andreas Gampe51f76352014-05-21 08:28:48 -07002007 // Create instances.
2008
2009 jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
2010 ASSERT_NE(nullptr, jarray_list);
2011 Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object*>(jarray_list)));
2012
2013 jobject jobj = env->NewObject(obj_jclass, obj_constructor);
2014 ASSERT_NE(nullptr, jobj);
2015 Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(jobj)));
2016
Andreas Gampe1a7e2922014-05-21 15:37:53 -07002017 // Invocation tests.
2018
2019 // 1. imt_conflict
2020
2021 // Contains.
Andreas Gampe51f76352014-05-21 08:28:48 -07002022
2023 size_t result =
2024 Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
2025 reinterpret_cast<size_t>(obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -07002026 StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
Andreas Gampe51f76352014-05-21 08:28:48 -07002027 self, contains_amethod.Get(),
2028 static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));
2029
2030 ASSERT_FALSE(self->IsExceptionPending());
2031 EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
2032
2033 // Add object.
2034
2035 env->CallBooleanMethod(jarray_list, add_jmethod, jobj);
2036
2037 ASSERT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));
2038
Andreas Gampe1a7e2922014-05-21 15:37:53 -07002039 // Contains.
Andreas Gampe51f76352014-05-21 08:28:48 -07002040
2041 result = Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
2042 reinterpret_cast<size_t>(obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -07002043 StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
Andreas Gampe51f76352014-05-21 08:28:48 -07002044 self, contains_amethod.Get(),
2045 static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));
2046
2047 ASSERT_FALSE(self->IsExceptionPending());
2048 EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);
Andreas Gampe1a7e2922014-05-21 15:37:53 -07002049
2050 // 2. regular interface trampoline
2051
2052 result = Invoke3WithReferrer(static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()),
2053 reinterpret_cast<size_t>(array_list.Get()),
2054 reinterpret_cast<size_t>(obj.Get()),
2055 StubTest::GetEntrypoint(self,
2056 kQuickInvokeInterfaceTrampolineWithAccessCheck),
2057 self, contains_amethod.Get());
2058
2059 ASSERT_FALSE(self->IsExceptionPending());
2060 EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);
2061
2062 result = Invoke3WithReferrer(static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()),
2063 reinterpret_cast<size_t>(array_list.Get()),
2064 reinterpret_cast<size_t>(array_list.Get()),
2065 StubTest::GetEntrypoint(self,
2066 kQuickInvokeInterfaceTrampolineWithAccessCheck),
2067 self, contains_amethod.Get());
2068
2069 ASSERT_FALSE(self->IsExceptionPending());
2070 EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
Andreas Gampe51f76352014-05-21 08:28:48 -07002071#else
Andreas Gampe6aac3552014-06-09 14:55:53 -07002072 LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
Andreas Gampe51f76352014-05-21 08:28:48 -07002073 // Force-print to std::cout so it's also outside the logcat.
Andreas Gampe6aac3552014-06-09 14:55:53 -07002074 std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
2075#endif
2076}
2077
Andreas Gampe6aac3552014-06-09 14:55:53 -07002078TEST_F(StubTest, StringIndexOf) {
2079#if defined(__arm__) || defined(__aarch64__)
Hiroshi Yamauchi52fa8142014-06-16 12:59:49 -07002080 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
2081
Andreas Gampe6aac3552014-06-09 14:55:53 -07002082 Thread* self = Thread::Current();
2083 ScopedObjectAccess soa(self);
2084 // garbage is created during ClassLinker::Init
2085
2086 // Create some strings
2087 // Use array so we can index into it and use a matrix for expected results
2088 // Setup: The first half is standard. The second half uses a non-zero offset.
2089 // TODO: Shared backing arrays.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002090 const char* c_str[] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
2091 static constexpr size_t kStringCount = arraysize(c_str);
2092 const char c_char[] = { 'a', 'b', 'c', 'd', 'e' };
2093 static constexpr size_t kCharCount = arraysize(c_char);
Andreas Gampe6aac3552014-06-09 14:55:53 -07002094
2095 StackHandleScope<kStringCount> hs(self);
2096 Handle<mirror::String> s[kStringCount];
2097
2098 for (size_t i = 0; i < kStringCount; ++i) {
2099 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
2100 }
2101
2102 // Matrix of expectations. First component is first parameter. Note we only check against the
2103 // sign, not the value. As we are testing random offsets, we need to compute this and need to
2104 // rely on String::CompareTo being correct.
2105 static constexpr size_t kMaxLen = 9;
2106 DCHECK_LE(strlen(c_str[kStringCount-1]), kMaxLen) << "Please fix the indexof test.";
2107
2108 // Last dimension: start, offset by 1.
2109 int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
2110 for (size_t x = 0; x < kStringCount; ++x) {
2111 for (size_t y = 0; y < kCharCount; ++y) {
2112 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2113 expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
2114 }
2115 }
2116 }
2117
2118 // Play with it...
2119
2120 for (size_t x = 0; x < kStringCount; ++x) {
2121 for (size_t y = 0; y < kCharCount; ++y) {
2122 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2123 int32_t start = static_cast<int32_t>(z) - 1;
2124
2125 // Test string_compareto x y
2126 size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
Andreas Gampe29b38412014-08-13 00:15:43 -07002127 StubTest::GetEntrypoint(self, kQuickIndexOf), self);
Andreas Gampe6aac3552014-06-09 14:55:53 -07002128
2129 EXPECT_FALSE(self->IsExceptionPending());
2130
2131 // The result is a 32b signed integer
2132 union {
2133 size_t r;
2134 int32_t i;
2135 } conv;
2136 conv.r = result;
2137
2138 EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
2139 c_char[y] << " @ " << start;
2140 }
2141 }
2142 }
2143
2144 // TODO: Deallocate things.
2145
2146 // Tests done.
2147#else
2148 LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
2149 // Force-print to std::cout so it's also outside the logcat.
2150 std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe51f76352014-05-21 08:28:48 -07002151#endif
2152}
2153
Andreas Gampe525cde22014-04-22 15:44:50 -07002154} // namespace art