blob: 6acc2a7829fbe9de5d1e7ae96394ccebc3c265d2 [file] [log] [blame]
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
Ian Rogerse63db272014-07-15 15:36:11 -070017#include <cstdio>
18
Andreas Gampe525cde22014-04-22 15:44:50 -070019#include "common_runtime_test.h"
Andreas Gampe29b38412014-08-13 00:15:43 -070020#include "entrypoints/quick/quick_entrypoints_enum.h"
Andreas Gampe6e4e59c2014-05-05 20:11:02 -070021#include "mirror/art_field-inl.h"
Andreas Gampe51f76352014-05-21 08:28:48 -070022#include "mirror/art_method-inl.h"
23#include "mirror/class-inl.h"
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -070024#include "mirror/string-inl.h"
Ian Rogerse63db272014-07-15 15:36:11 -070025#include "scoped_thread_state_change.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070026
27namespace art {
28
29
// Test fixture for exercising the quick-entrypoint assembly stubs directly.
// It provides Invoke3* helpers that hand-craft a managed-code transition and
// then branch into a stub through per-architecture inline assembly, passing
// up to three word-sized arguments plus the Thread* (and optionally a
// referrer ArtMethod* and a "hidden" argument).
class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(), type);
        }
      }
    }
  }

  // Shrink the heap so allocation-heavy stub tests stress GC paths sooner,
  // and force the interpreter so no compiled code interferes with the stubs.
  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  // Exposes the thread's pointer-sized TLS values to tests (StubTest is a
  // friend of Thread; the TEST_F subclass is not).
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  // Invoke a stub with three arguments and no referrer method.
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // Invoke a stub with three arguments, pushing `referrer` where the stub
  // expects the calling ArtMethod* (on the stack for x86/x86-64, set up via
  // registers/stack on ARM). On AArch64 this also deliberately garbles the
  // callee-save FP registers d8-d15 around the call and verifies the stub
  // preserved them; the outcome lands in fp_result (0 == preserved).
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             mirror::ArtMethod* referrer) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "subl $12, %%esp\n\t"       // Align stack.
        "pushl %[referrer]\n\t"     // Store referrer.
        "call *%%edi\n\t"           // Call the stub
        "addl $16, %%esp"           // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #20\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "add sp, sp, #20\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop nullptr and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"    // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "str %[self], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldr x18, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"


        "blr x3\n\t"              // Call the stub
        "mov x8, x0\n\t"          // Store result
        "add sp, sp, #16\n\t"     // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"          // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"    // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"     // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t"  // Store the FPR comparison result
        "mov %[result], x8\n\t"      // Store the call result

        "b 3f\n\t"                // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__) && defined(__clang__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"              // Call the stub
        "addq $16, %%rsp\n\t"          // Pop nullptr and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "c"(referrer)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    // On AArch64 a non-zero fpr_result means the stub trashed a callee-save
    // FP register (d8-d15); other architectures leave it at 0.
    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Like Invoke3WithReferrer, but additionally passes a "hidden" argument in
  // the register the trampolines use for it (xmm7 on x86, r12 on ARM, x17 on
  // AArch64, rax on x86-64) — used e.g. by the imt-conflict trampoline.
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, mirror::ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "movd %[hidden], %%xmm7\n\t"
        "subl $12, %%esp\n\t"       // Align stack.
        "pushl %[referrer]\n\t"     // Store referrer
        "call *%%edi\n\t"           // Call the stub
        "addl $16, %%esp"           // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer), [hidden]"m"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop nullptr and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"    // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x18, x17, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"              // Call the stub
        "mov x8, x0\n\t"          // Store result
        "add sp, sp, #16\n\t"     // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"          // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"    // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"     // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t"  // Store the FPR comparison result
        "mov %[result], x8\n\t"      // Store the call result

        "b 3f\n\t"                // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__) && defined(__clang__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rbx\n\t"              // Call the stub
        "addq $16, %%rsp\n\t"          // Pop nullptr and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "b"(code), [referrer] "c"(referrer), [hidden] "a"(hidden)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    // Record the FPR-preservation verdict (AArch64 only; 0 elsewhere).
    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Method with 32b arg0, 64b arg1
  // On 64-bit targets the 64-bit value fits in one argument register; on
  // 32-bit targets it is split into low/high halves across arg1/arg2.
  size_t Invoke3UWithReferrer(size_t arg0, uint64_t arg1, uintptr_t code, Thread* self,
                              mirror::ArtMethod* referrer) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, 0U, code, self, referrer);
#else
    // Need to split up arguments.
    uint32_t lower = static_cast<uint32_t>(arg1 & 0xFFFFFFFF);
    uint32_t upper = static_cast<uint32_t>((arg1 >> 32) & 0xFFFFFFFF);

    return Invoke3WithReferrer(arg0, lower, upper, code, self, referrer);
#endif
  }

  // Read the address of a quick entrypoint stub out of the thread's
  // entrypoint table (offset depends on pointer size).
  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
    int32_t offset;
#ifdef __LP64__
    offset = GetThreadOffset<8>(entrypoint).Int32Value();
#else
    offset = GetThreadOffset<4>(entrypoint).Int32Value();
#endif
    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
  }

 protected:
  // Result of the FPR callee-save verification done by the Invoke3* helpers
  // (0 == all of d8-d15 survived the call).
  size_t fp_result;
};
546
547
Andreas Gampe525cde22014-04-22 15:44:50 -0700548TEST_F(StubTest, Memcpy) {
Ian Rogersc3ccc102014-06-25 11:52:14 -0700549#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700550 Thread* self = Thread::Current();
551
552 uint32_t orig[20];
553 uint32_t trg[20];
554 for (size_t i = 0; i < 20; ++i) {
555 orig[i] = i;
556 trg[i] = 0;
557 }
558
559 Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
Andreas Gampe29b38412014-08-13 00:15:43 -0700560 10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700561
562 EXPECT_EQ(orig[0], trg[0]);
563
564 for (size_t i = 1; i < 4; ++i) {
565 EXPECT_NE(orig[i], trg[i]);
566 }
567
568 for (size_t i = 4; i < 14; ++i) {
569 EXPECT_EQ(orig[i], trg[i]);
570 }
571
572 for (size_t i = 14; i < 20; ++i) {
573 EXPECT_NE(orig[i], trg[i]);
574 }
575
576 // TODO: Test overlapping?
577
578#else
579 LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
580 // Force-print to std::cout so it's also outside the logcat.
581 std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
582#endif
583}
584
Andreas Gampe525cde22014-04-22 15:44:50 -0700585TEST_F(StubTest, LockObject) {
Ian Rogersc3ccc102014-06-25 11:52:14 -0700586#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700587 static constexpr size_t kThinLockLoops = 100;
588
Andreas Gampe525cde22014-04-22 15:44:50 -0700589 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700590
591 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
592
Andreas Gampe525cde22014-04-22 15:44:50 -0700593 // Create an object
594 ScopedObjectAccess soa(self);
595 // garbage is created during ClassLinker::Init
596
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700597 StackHandleScope<2> hs(soa.Self());
598 Handle<mirror::String> obj(
599 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700600 LockWord lock = obj->GetLockWord(false);
601 LockWord::LockState old_state = lock.GetState();
602 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
603
Andreas Gampe29b38412014-08-13 00:15:43 -0700604 Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700605
606 LockWord lock_after = obj->GetLockWord(false);
607 LockWord::LockState new_state = lock_after.GetState();
608 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700609 EXPECT_EQ(lock_after.ThinLockCount(), 0U); // Thin lock starts count at zero
610
611 for (size_t i = 1; i < kThinLockLoops; ++i) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700612 Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700613
614 // Check we're at lock count i
615
616 LockWord l_inc = obj->GetLockWord(false);
617 LockWord::LockState l_inc_state = l_inc.GetState();
618 EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
619 EXPECT_EQ(l_inc.ThinLockCount(), i);
620 }
621
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700622 // Force a fat lock by running identity hashcode to fill up lock word.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700623 Handle<mirror::String> obj2(hs.NewHandle(
624 mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700625
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700626 obj2->IdentityHashCode();
627
Andreas Gampe29b38412014-08-13 00:15:43 -0700628 Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700629
630 LockWord lock_after2 = obj2->GetLockWord(false);
631 LockWord::LockState new_state2 = lock_after2.GetState();
632 EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
633 EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));
634
635 // Test done.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700636#else
637 LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
638 // Force-print to std::cout so it's also outside the logcat.
639 std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
640#endif
641}
642
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700643
// Tiny deterministic pseudo-random generator for the lock stress test.
// Lehmer-style step (multiplier 48271, modulus 2^31 - 1) with an extra +13,
// evaluated in unsigned 32-bit arithmetic. Not a quality RNG — just a
// repeatable sequence.
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  // Advance the state and return the new value.
  uint32_t next() {
    const uint32_t scaled = val_ * 48271u;          // wraps mod 2^32
    val_ = (scaled % 2147483647u) + 13u;
    return val_;
  }

  uint32_t val_;  // Current state; public like the original aggregate.
};
655
656
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700657// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
658static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
Ian Rogersc3ccc102014-06-25 11:52:14 -0700659#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700660 static constexpr size_t kThinLockLoops = 100;
661
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700662 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700663
664 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
665 const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700666 // Create an object
667 ScopedObjectAccess soa(self);
668 // garbage is created during ClassLinker::Init
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700669 static constexpr size_t kNumberOfLocks = 10; // Number of objects = lock
670 StackHandleScope<kNumberOfLocks + 1> hs(self);
671 Handle<mirror::String> obj(
672 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700673 LockWord lock = obj->GetLockWord(false);
674 LockWord::LockState old_state = lock.GetState();
675 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
676
Andreas Gampe29b38412014-08-13 00:15:43 -0700677 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700678 // This should be an illegal monitor state.
679 EXPECT_TRUE(self->IsExceptionPending());
680 self->ClearException();
681
682 LockWord lock_after = obj->GetLockWord(false);
683 LockWord::LockState new_state = lock_after.GetState();
684 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);
Andreas Gampe525cde22014-04-22 15:44:50 -0700685
Andreas Gampe29b38412014-08-13 00:15:43 -0700686 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700687
688 LockWord lock_after2 = obj->GetLockWord(false);
689 LockWord::LockState new_state2 = lock_after2.GetState();
690 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);
691
Andreas Gampe29b38412014-08-13 00:15:43 -0700692 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700693
694 LockWord lock_after3 = obj->GetLockWord(false);
695 LockWord::LockState new_state3 = lock_after3.GetState();
696 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);
697
698 // Stress test:
699 // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
700 // each step.
701
702 RandGen r(0x1234);
703
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700704 constexpr size_t kIterations = 10000; // Number of iterations
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700705 constexpr size_t kMoveToFat = 1000; // Chance of 1:kMoveFat to make a lock fat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700706
707 size_t counts[kNumberOfLocks];
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700708 bool fat[kNumberOfLocks]; // Whether a lock should be thin or fat.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700709 Handle<mirror::String> objects[kNumberOfLocks];
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700710
711 // Initialize = allocate.
712 for (size_t i = 0; i < kNumberOfLocks; ++i) {
713 counts[i] = 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700714 fat[i] = false;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700715 objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700716 }
717
718 for (size_t i = 0; i < kIterations; ++i) {
719 // Select which lock to update.
720 size_t index = r.next() % kNumberOfLocks;
721
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700722 // Make lock fat?
723 if (!fat[index] && (r.next() % kMoveToFat == 0)) {
724 fat[index] = true;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700725 objects[index]->IdentityHashCode();
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700726
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700727 LockWord lock_iter = objects[index]->GetLockWord(false);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700728 LockWord::LockState iter_state = lock_iter.GetState();
729 if (counts[index] == 0) {
730 EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
731 } else {
732 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
733 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700734 } else {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800735 bool take_lock; // Whether to lock or unlock in this step.
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700736 if (counts[index] == 0) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800737 take_lock = true;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700738 } else if (counts[index] == kThinLockLoops) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800739 take_lock = false;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700740 } else {
741 // Randomly.
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800742 take_lock = r.next() % 2 == 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700743 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700744
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800745 if (take_lock) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700746 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
747 self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700748 counts[index]++;
749 } else {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700750 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700751 art_quick_unlock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700752 counts[index]--;
753 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700754
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700755 EXPECT_FALSE(self->IsExceptionPending());
756
757 // Check the new state.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700758 LockWord lock_iter = objects[index]->GetLockWord(true);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700759 LockWord::LockState iter_state = lock_iter.GetState();
760 if (fat[index]) {
761 // Abuse MonitorInfo.
762 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700763 MonitorInfo info(objects[index].Get());
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700764 EXPECT_EQ(counts[index], info.entry_count_) << index;
765 } else {
766 if (counts[index] > 0) {
767 EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
768 EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
769 } else {
770 EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
771 }
772 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700773 }
774 }
775
776 // Unlock the remaining count times and then check it's unlocked. Then deallocate.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700777 // Go reverse order to correctly handle Handles.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700778 for (size_t i = 0; i < kNumberOfLocks; ++i) {
779 size_t index = kNumberOfLocks - 1 - i;
780 size_t count = counts[index];
781 while (count > 0) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700782 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
783 self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700784 count--;
785 }
786
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700787 LockWord lock_after4 = objects[index]->GetLockWord(false);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700788 LockWord::LockState new_state4 = lock_after4.GetState();
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700789 EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
790 || LockWord::LockState::kFatLocked == new_state4);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700791 }
792
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700793 // Test done.
Andreas Gampe525cde22014-04-22 15:44:50 -0700794#else
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700795 LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
Andreas Gampe525cde22014-04-22 15:44:50 -0700796 // Force-print to std::cout so it's also outside the logcat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700797 std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe525cde22014-04-22 15:44:50 -0700798#endif
799}
800
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700801TEST_F(StubTest, UnlockObject) {
Andreas Gampe369810a2015-01-14 19:53:31 -0800802 // This will lead to monitor error messages in the log.
803 ScopedLogSeverity sls(LogSeverity::FATAL);
804
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700805 TestUnlockObject(this);
806}
Andreas Gampe525cde22014-04-22 15:44:50 -0700807
Ian Rogersc3ccc102014-06-25 11:52:14 -0700808#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700809extern "C" void art_quick_check_cast(void);
810#endif
811
812TEST_F(StubTest, CheckCast) {
Ian Rogersc3ccc102014-06-25 11:52:14 -0700813#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700814 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700815
816 const uintptr_t art_quick_check_cast = StubTest::GetEntrypoint(self, kQuickCheckCast);
817
Andreas Gampe525cde22014-04-22 15:44:50 -0700818 // Find some classes.
819 ScopedObjectAccess soa(self);
820 // garbage is created during ClassLinker::Init
821
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700822 StackHandleScope<2> hs(soa.Self());
823 Handle<mirror::Class> c(
824 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
825 Handle<mirror::Class> c2(
826 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700827
828 EXPECT_FALSE(self->IsExceptionPending());
829
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700830 Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700831 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700832
833 EXPECT_FALSE(self->IsExceptionPending());
834
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700835 Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700836 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700837
838 EXPECT_FALSE(self->IsExceptionPending());
839
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700840 Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700841 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700842
843 EXPECT_FALSE(self->IsExceptionPending());
844
845 // TODO: Make the following work. But that would require correct managed frames.
846
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700847 Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700848 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700849
850 EXPECT_TRUE(self->IsExceptionPending());
851 self->ClearException();
852
853#else
854 LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
855 // Force-print to std::cout so it's also outside the logcat.
856 std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
857#endif
858}
859
860
Andreas Gampe525cde22014-04-22 15:44:50 -0700861TEST_F(StubTest, APutObj) {
Hiroshi Yamauchid6881ae2014-04-28 17:21:48 -0700862 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
863
Ian Rogersc3ccc102014-06-25 11:52:14 -0700864#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700865 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700866
867 // Do not check non-checked ones, we'd need handlers and stuff...
868 const uintptr_t art_quick_aput_obj_with_null_and_bound_check =
869 StubTest::GetEntrypoint(self, kQuickAputObjectWithNullAndBoundCheck);
870
Andreas Gampe525cde22014-04-22 15:44:50 -0700871 // Create an object
872 ScopedObjectAccess soa(self);
873 // garbage is created during ClassLinker::Init
874
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700875 StackHandleScope<5> hs(soa.Self());
876 Handle<mirror::Class> c(
877 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
878 Handle<mirror::Class> ca(
879 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700880
881 // Build a string array of size 1
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700882 Handle<mirror::ObjectArray<mirror::Object>> array(
883 hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));
Andreas Gampe525cde22014-04-22 15:44:50 -0700884
885 // Build a string -> should be assignable
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700886 Handle<mirror::String> str_obj(
887 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700888
889 // Build a generic object -> should fail assigning
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700890 Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));
Andreas Gampe525cde22014-04-22 15:44:50 -0700891
892 // Play with it...
893
894 // 1) Success cases
Andreas Gampef4e910b2014-04-29 16:55:52 -0700895 // 1.1) Assign str_obj to array[0..3]
Andreas Gampe525cde22014-04-22 15:44:50 -0700896
897 EXPECT_FALSE(self->IsExceptionPending());
898
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700899 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700900 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700901
902 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700903 EXPECT_EQ(str_obj.Get(), array->Get(0));
Andreas Gampe525cde22014-04-22 15:44:50 -0700904
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700905 Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700906 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700907
908 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700909 EXPECT_EQ(str_obj.Get(), array->Get(1));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700910
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700911 Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700912 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700913
914 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700915 EXPECT_EQ(str_obj.Get(), array->Get(2));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700916
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700917 Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700918 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700919
920 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700921 EXPECT_EQ(str_obj.Get(), array->Get(3));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700922
923 // 1.2) Assign null to array[0..3]
Andreas Gampe525cde22014-04-22 15:44:50 -0700924
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700925 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700926 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700927
928 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampef4e910b2014-04-29 16:55:52 -0700929 EXPECT_EQ(nullptr, array->Get(0));
930
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700931 Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700932 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700933
934 EXPECT_FALSE(self->IsExceptionPending());
935 EXPECT_EQ(nullptr, array->Get(1));
936
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700937 Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700938 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700939
940 EXPECT_FALSE(self->IsExceptionPending());
941 EXPECT_EQ(nullptr, array->Get(2));
942
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700943 Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700944 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700945
946 EXPECT_FALSE(self->IsExceptionPending());
947 EXPECT_EQ(nullptr, array->Get(3));
Andreas Gampe525cde22014-04-22 15:44:50 -0700948
949 // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.
950
951 // 2) Failure cases (str into str[])
952 // 2.1) Array = null
953 // TODO: Throwing NPE needs actual DEX code
954
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700955// Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe525cde22014-04-22 15:44:50 -0700956// reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
957//
958// EXPECT_TRUE(self->IsExceptionPending());
959// self->ClearException();
960
961 // 2.2) Index < 0
962
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700963 Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
964 reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700965 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700966
967 EXPECT_TRUE(self->IsExceptionPending());
968 self->ClearException();
969
970 // 2.3) Index > 0
971
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700972 Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700973 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700974
975 EXPECT_TRUE(self->IsExceptionPending());
976 self->ClearException();
977
978 // 3) Failure cases (obj into str[])
979
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700980 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700981 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700982
983 EXPECT_TRUE(self->IsExceptionPending());
984 self->ClearException();
985
986 // Tests done.
987#else
988 LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
989 // Force-print to std::cout so it's also outside the logcat.
990 std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
991#endif
992}
993
// Exercises the three object-allocation stubs (by type index, resolved class,
// initialized class) on the happy path, then drives the heap to exhaustion and
// checks that allocation fails with a pending exception and a null result.
TEST_F(StubTest, AllocObject) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // This will lead to OOM error messages in the log; suppress below FATAL.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // kQuickAllocObject takes a dex type index plus a referrer method.
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            reinterpret_cast<size_t>(c->GetVirtualMethod(0)),  // arbitrary
                            0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObject),
                            self);

    // The stub must return a non-null object of exactly the requested class.
    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K
    size_t length = 128 * KB / 4;
    // Fill the heap with object arrays of shrinking size until even small
    // ones no longer fit. Successful allocations are kept alive in handles.
    while (length > 10) {
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h.Get() == nullptr) {
        self->ClearException();

        // Try a smaller length
        length = length / 8;
        // Use at most half the reported free space.
        // NOTE(review): the clamp below actually caps length at mem/8, i.e.
        // an eighth of the reported free bytes — confirm intent.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h.Get() != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    // With the heap full, the stub itself must now fail: pending exception
    // and a null result.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1118
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001119TEST_F(StubTest, AllocObjectArray) {
1120 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1121
Ian Rogersc3ccc102014-06-25 11:52:14 -07001122#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001123 // TODO: Check the "Unresolved" allocation stubs
1124
Andreas Gampe369810a2015-01-14 19:53:31 -08001125 // This will lead to OOM error messages in the log.
1126 ScopedLogSeverity sls(LogSeverity::FATAL);
1127
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001128 Thread* self = Thread::Current();
1129 // Create an object
1130 ScopedObjectAccess soa(self);
1131 // garbage is created during ClassLinker::Init
1132
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001133 StackHandleScope<2> hs(self);
1134 Handle<mirror::Class> c(
1135 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001136
1137 // Needed to have a linked method.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001138 Handle<mirror::Class> c_obj(
1139 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001140
1141 // Play with it...
1142
1143 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001144
1145 // For some reason this does not work, as the type_idx is artificial and outside what the
1146 // resolved types of c_obj allow...
1147
Ian Rogerscf7f1912014-10-22 22:06:39 -07001148 if ((false)) {
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001149 // Use an arbitrary method from c to use as referrer
1150 size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()), // type_idx
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001151 10U,
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001152 reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0)), // arbitrary
Andreas Gampe29b38412014-08-13 00:15:43 -07001153 StubTest::GetEntrypoint(self, kQuickAllocArray),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001154 self);
1155
1156 EXPECT_FALSE(self->IsExceptionPending());
1157 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1158 mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001159 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001160 VerifyObject(obj);
1161 EXPECT_EQ(obj->GetLength(), 10);
1162 }
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001163
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001164 {
1165 // We can use nullptr in the second argument as we do not need a method here (not used in
1166 // resolved/initialized cases)
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001167 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 10U,
1168 reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -07001169 StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001170 self);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001171 EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001172 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1173 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
1174 EXPECT_TRUE(obj->IsArrayInstance());
1175 EXPECT_TRUE(obj->IsObjectArray());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001176 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001177 VerifyObject(obj);
1178 mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
1179 EXPECT_EQ(array->GetLength(), 10);
1180 }
1181
1182 // Failure tests.
1183
1184 // Out-of-memory.
1185 {
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001186 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001187 GB, // that should fail...
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001188 reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -07001189 StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001190 self);
1191
1192 EXPECT_TRUE(self->IsExceptionPending());
1193 self->ClearException();
1194 EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
1195 }
1196
1197 // Tests done.
1198#else
1199 LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
1200 // Force-print to std::cout so it's also outside the logcat.
1201 std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
1202#endif
1203}
1204
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001205
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001206TEST_F(StubTest, StringCompareTo) {
1207 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1208
Ian Rogersc3ccc102014-06-25 11:52:14 -07001209#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001210 // TODO: Check the "Unresolved" allocation stubs
1211
1212 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -07001213
1214 const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);
1215
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001216 ScopedObjectAccess soa(self);
1217 // garbage is created during ClassLinker::Init
1218
1219 // Create some strings
1220 // Use array so we can index into it and use a matrix for expected results
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001221 // Setup: The first half is standard. The second half uses a non-zero offset.
1222 // TODO: Shared backing arrays.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001223 const char* c[] = { "", "", "a", "aa", "ab",
Serban Constantinescu86797a72014-06-19 16:17:56 +01001224 "aacaacaacaacaacaac", // This one's under the default limit to go to __memcmp16.
1225 "aacaacaacaacaacaacaacaacaacaacaacaac", // This one's over.
1226 "aacaacaacaacaacaacaacaacaacaacaacaaca" }; // As is this one. We need a separate one to
1227 // defeat object-equal optimizations.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001228 static constexpr size_t kBaseStringCount = arraysize(c);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001229 static constexpr size_t kStringCount = 2 * kBaseStringCount;
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001230
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001231 StackHandleScope<kStringCount> hs(self);
1232 Handle<mirror::String> s[kStringCount];
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001233
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001234 for (size_t i = 0; i < kBaseStringCount; ++i) {
1235 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001236 }
1237
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001238 RandGen r(0x1234);
1239
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001240 for (size_t i = kBaseStringCount; i < kStringCount; ++i) {
1241 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i - kBaseStringCount]));
1242 int32_t length = s[i]->GetLength();
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001243 if (length > 1) {
1244 // Set a random offset and length.
1245 int32_t new_offset = 1 + (r.next() % (length - 1));
1246 int32_t rest = length - new_offset - 1;
1247 int32_t new_length = 1 + (rest > 0 ? r.next() % rest : 0);
1248
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001249 s[i]->SetField32<false>(mirror::String::CountOffset(), new_length);
1250 s[i]->SetField32<false>(mirror::String::OffsetOffset(), new_offset);
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001251 }
1252 }
1253
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001254 // TODO: wide characters
1255
1256 // Matrix of expectations. First component is first parameter. Note we only check against the
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001257 // sign, not the value. As we are testing random offsets, we need to compute this and need to
1258 // rely on String::CompareTo being correct.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001259 int32_t expected[kStringCount][kStringCount];
1260 for (size_t x = 0; x < kStringCount; ++x) {
1261 for (size_t y = 0; y < kStringCount; ++y) {
1262 expected[x][y] = s[x]->CompareTo(s[y].Get());
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001263 }
1264 }
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001265
1266 // Play with it...
1267
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001268 for (size_t x = 0; x < kStringCount; ++x) {
1269 for (size_t y = 0; y < kStringCount; ++y) {
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001270 // Test string_compareto x y
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001271 size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
1272 reinterpret_cast<size_t>(s[y].Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001273 art_quick_string_compareto, self);
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001274
1275 EXPECT_FALSE(self->IsExceptionPending());
1276
1277 // The result is a 32b signed integer
1278 union {
1279 size_t r;
1280 int32_t i;
1281 } conv;
1282 conv.r = result;
1283 int32_t e = expected[x][y];
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001284 EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1285 conv.r;
1286 EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1287 conv.r;
1288 EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1289 conv.r;
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001290 }
1291 }
1292
Andreas Gampe7177d7c2014-05-02 12:10:02 -07001293 // TODO: Deallocate things.
1294
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001295 // Tests done.
1296#else
1297 LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
1298 // Force-print to std::cout so it's also outside the logcat.
1299 std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
1300 std::endl;
1301#endif
1302}
1303
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001304
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001305static void GetSetBooleanStatic(Handle<mirror::ArtField>* f, Thread* self,
1306 mirror::ArtMethod* referrer, StubTest* test)
Fred Shih37f05ef2014-07-16 18:38:08 -07001307 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1308#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
1309 constexpr size_t num_values = 5;
1310 uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };
1311
1312 for (size_t i = 0; i < num_values; ++i) {
1313 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1314 static_cast<size_t>(values[i]),
1315 0U,
1316 StubTest::GetEntrypoint(self, kQuickSet8Static),
1317 self,
1318 referrer);
1319
1320 size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1321 0U, 0U,
1322 StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
1323 self,
1324 referrer);
1325 // Boolean currently stores bools as uint8_t, be more zealous about asserting correct writes/gets.
1326 EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
1327 }
1328#else
1329 LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
1330 // Force-print to std::cout so it's also outside the logcat.
1331 std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1332#endif
1333}
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001334static void GetSetByteStatic(Handle<mirror::ArtField>* f, Thread* self,
1335 mirror::ArtMethod* referrer, StubTest* test)
Fred Shih37f05ef2014-07-16 18:38:08 -07001336 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1337#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001338 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001339
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001340 for (size_t i = 0; i < arraysize(values); ++i) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001341 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1342 static_cast<size_t>(values[i]),
1343 0U,
1344 StubTest::GetEntrypoint(self, kQuickSet8Static),
1345 self,
1346 referrer);
1347
1348 size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1349 0U, 0U,
1350 StubTest::GetEntrypoint(self, kQuickGetByteStatic),
1351 self,
1352 referrer);
1353 EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
1354 }
1355#else
1356 LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
1357 // Force-print to std::cout so it's also outside the logcat.
1358 std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1359#endif
1360}
1361
1362
// Exercises the Set8Instance/GetBooleanInstance quick entrypoints against a
// boolean instance field. Per value: write via the stub, verify with a
// direct field read, write that value back directly, then verify the stub
// read observes the same byte.
static void GetSetBooleanInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                                  Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // Includes non-canonical "true" bytes (2, 128, 0xFF); booleans are
  // currently stored as raw uint8_t.
  uint8_t values[] = { 0, true, 2, 128, 0xFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the 8-bit instance set stub.
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet8Instance),
                              self,
                              referrer);

    // Direct (non-stub) read must see the value the stub wrote.
    uint8_t res = f->Get()->GetBoolean(obj->Get());
    EXPECT_EQ(values[i], res) << "Iteration " << i;

    f->Get()->SetBoolean<false>(obj->Get(), res);

    // The stub read must agree with the direct read above.
    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint8_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
// Exercises the Set8Instance/GetByteInstance quick entrypoints against a byte
// instance field. Per value: write via the stub, verify with a direct field
// read, bump the value with a direct write, then verify the stub read
// observes the bumped value.
static void GetSetByteInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                               Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  int8_t values[] = { -128, -64, 0, 64, 127 };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the 8-bit instance set stub.
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet8Instance),
                              self,
                              referrer);

    // Direct (non-stub) read must see the value the stub wrote.
    int8_t res = f->Get()->GetByte(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    // Change the field directly so the stub read below is checked against a
    // distinct value.
    f->Get()->SetByte<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetByteInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int8_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1428
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001429static void GetSetCharStatic(Handle<mirror::ArtField>* f, Thread* self, mirror::ArtMethod* referrer,
1430 StubTest* test)
Fred Shih37f05ef2014-07-16 18:38:08 -07001431 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1432#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001433 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001434
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001435 for (size_t i = 0; i < arraysize(values); ++i) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001436 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1437 static_cast<size_t>(values[i]),
1438 0U,
1439 StubTest::GetEntrypoint(self, kQuickSet16Static),
1440 self,
1441 referrer);
1442
1443 size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1444 0U, 0U,
1445 StubTest::GetEntrypoint(self, kQuickGetCharStatic),
1446 self,
1447 referrer);
1448
1449 EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
1450 }
1451#else
1452 LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
1453 // Force-print to std::cout so it's also outside the logcat.
1454 std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1455#endif
1456}
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001457static void GetSetShortStatic(Handle<mirror::ArtField>* f, Thread* self,
1458 mirror::ArtMethod* referrer, StubTest* test)
Fred Shih37f05ef2014-07-16 18:38:08 -07001459 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1460#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001461 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001462
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001463 for (size_t i = 0; i < arraysize(values); ++i) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001464 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1465 static_cast<size_t>(values[i]),
1466 0U,
1467 StubTest::GetEntrypoint(self, kQuickSet16Static),
1468 self,
1469 referrer);
1470
1471 size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1472 0U, 0U,
1473 StubTest::GetEntrypoint(self, kQuickGetShortStatic),
1474 self,
1475 referrer);
1476
1477 EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
1478 }
1479#else
1480 LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
1481 // Force-print to std::cout so it's also outside the logcat.
1482 std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1483#endif
1484}
1485
// Exercises the Set16Instance/GetCharInstance quick entrypoints against a
// char instance field. Per value: write via the stub, verify with a direct
// field read, bump the value with a direct write, then verify the stub read
// observes the bumped value.
static void GetSetCharInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                               Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the 16-bit instance set stub.
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    // Direct (non-stub) read must see the value the stub wrote.
    uint16_t res = f->Get()->GetChar(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    // Change the field directly so the stub read below is checked against a
    // distinct value.
    f->Get()->SetChar<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetCharInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint16_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
// Exercises the Set16Instance/GetShortInstance quick entrypoints against a
// short instance field. Per value: write via the stub, verify with a direct
// field read, bump the value with a direct write, then verify the stub read
// observes the bumped value.
static void GetSetShortInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                                Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the 16-bit instance set stub.
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    // Direct (non-stub) read must see the value the stub wrote.
    int16_t res = f->Get()->GetShort(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    // Change the field directly so the stub read below is checked against a
    // distinct value.
    f->Get()->SetShort<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetShortInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int16_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1550
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001551static void GetSet32Static(Handle<mirror::ArtField>* f, Thread* self, mirror::ArtMethod* referrer,
1552 StubTest* test)
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001553 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogersc3ccc102014-06-25 11:52:14 -07001554#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001555 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001556
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001557 for (size_t i = 0; i < arraysize(values); ++i) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001558 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1559 static_cast<size_t>(values[i]),
1560 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001561 StubTest::GetEntrypoint(self, kQuickSet32Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001562 self,
1563 referrer);
1564
1565 size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1566 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001567 StubTest::GetEntrypoint(self, kQuickGet32Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001568 self,
1569 referrer);
1570
1571 EXPECT_EQ(res, values[i]) << "Iteration " << i;
1572 }
1573#else
1574 LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
1575 // Force-print to std::cout so it's also outside the logcat.
1576 std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
1577#endif
1578}
1579
1580
// Exercises the Set32Instance/Get32Instance quick entrypoints against an int
// instance field. Per value: write via the stub, verify with a direct field
// read, bump the value with a direct write, then verify the stub read
// observes the bumped value.
static void GetSet32Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the 32-bit instance set stub.
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet32Instance),
                              self,
                              referrer);

    // Direct (non-stub) read must see the value the stub wrote.
    int32_t res = f->Get()->GetInt(obj->Get());
    EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;

    // Change the field directly so the stub read below is checked against a
    // distinct value.
    res++;
    f->Get()->SetInt<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGet32Instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int32_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1615
1616
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))

// Helper: stores `val` into the static object field with dex index `f_idx`
// via the SetObjStatic stub, reads it back via the GetObjStatic stub, and
// expects the identical reference.
static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
                                 mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                            reinterpret_cast<size_t>(val),
                            0U,
                            StubTest::GetEntrypoint(self, kQuickSetObjStatic),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                                         0U, 0U,
                                         StubTest::GetEntrypoint(self, kQuickGetObjStatic),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
}
#endif
1638
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001639static void GetSetObjStatic(Handle<mirror::ArtField>* f, Thread* self, mirror::ArtMethod* referrer,
1640 StubTest* test)
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001641 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogersc3ccc102014-06-25 11:52:14 -07001642#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001643 set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);
1644
1645 // Allocate a string object for simplicity.
1646 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
1647 set_and_check_static((*f)->GetDexFieldIndex(), str, self, referrer, test);
1648
1649 set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);
1650#else
1651 LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
1652 // Force-print to std::cout so it's also outside the logcat.
1653 std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
1654#endif
1655}
1656
1657
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
// Helper: writes `val` into instance field `f` of object `trg` via the
// SetObjInstance stub, reads it back via the GetObjInstance stub, and also
// cross-checks against a direct (non-stub) field read.
static void set_and_check_instance(Handle<mirror::ArtField>* f, mirror::Object* trg,
                                   mirror::Object* val, Thread* self, mirror::ArtMethod* referrer,
                                   StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                            reinterpret_cast<size_t>(trg),
                            reinterpret_cast<size_t>(val),
                            StubTest::GetEntrypoint(self, kQuickSetObjInstance),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                         reinterpret_cast<size_t>(trg),
                                         0U,
                                         StubTest::GetEntrypoint(self, kQuickGetObjInstance),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;

  // The direct read must agree with what the stub stored.
  EXPECT_EQ(val, f->Get()->GetObj(trg));
}
#endif
1682
// Exercises the SetObjInstance/GetObjInstance stubs with null and with a live
// string reference, finishing with null so no stale reference is left in the
// field. obj->Get() is re-read for each call rather than cached: the
// allocation in between may trigger GC, and only the Handle is safe to hold.
static void GetSetObjInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                              Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_instance(f, obj->Get(), str, self, referrer, test);

  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
#else
  LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1700
1701
1702// TODO: Complete these tests for 32b architectures.
1703
// Exercises the Set64Static/Get64Static quick entrypoints. Only enabled on
// 64-bit targets, where a 64-bit value fits in a single size_t argument;
// Invoke3UWithReferrer carries the uint64_t through without truncation.
static void GetSet64Static(Handle<mirror::ArtField>* f, Thread* self, mirror::ArtMethod* referrer,
                           StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
  uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3UWithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                               values[i],
                               StubTest::GetEntrypoint(self, kQuickSet64Static),
                               self,
                               referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet64Static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1732
1733
// Exercises the Set64Instance/Get64Instance quick entrypoints against a long
// instance field. Only enabled on 64-bit targets, so the
// static_cast<size_t>(values[i]) below is lossless. Per value: write via the
// stub, verify with a direct field read, bump the value with a direct write,
// then verify the stub read observes the bumped value.
static void GetSet64Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
  uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the 64-bit instance set stub.
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet64Instance),
                              self,
                              referrer);

    // Direct (non-stub) read must see the value the stub wrote.
    int64_t res = f->Get()->GetLong(obj->Get());
    EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;

    // Change the field directly so the stub read below is checked against a
    // distinct value.
    res++;
    f->Get()->SetLong<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGet64Instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int64_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1769
1770static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
1771 // garbage is created during ClassLinker::Init
1772
1773 JNIEnv* env = Thread::Current()->GetJniEnv();
1774 jclass jc = env->FindClass("AllFields");
1775 CHECK(jc != NULL);
1776 jobject o = env->AllocObject(jc);
1777 CHECK(o != NULL);
1778
1779 ScopedObjectAccess soa(self);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001780 StackHandleScope<5> hs(self);
1781 Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
1782 Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001783 // Need a method as a referrer
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001784 Handle<mirror::ArtMethod> m(hs.NewHandle(c->GetDirectMethod(0)));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001785
1786 // Play with it...
1787
1788 // Static fields.
1789 {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001790 Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetSFields()));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001791 int32_t num_fields = fields->GetLength();
1792 for (int32_t i = 0; i < num_fields; ++i) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -08001793 StackHandleScope<1> hs2(self);
1794 Handle<mirror::ArtField> f(hs2.NewHandle(fields->Get(i)));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001795
Mathieu Chartier61c5ebc2014-06-05 17:42:53 -07001796 Primitive::Type type = f->GetTypeAsPrimitiveType();
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001797 switch (type) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001798 case Primitive::Type::kPrimBoolean:
1799 if (test_type == type) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001800 GetSetBooleanStatic(&f, self, m.Get(), test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001801 }
1802 break;
1803 case Primitive::Type::kPrimByte:
1804 if (test_type == type) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001805 GetSetByteStatic(&f, self, m.Get(), test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001806 }
1807 break;
1808 case Primitive::Type::kPrimChar:
1809 if (test_type == type) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001810 GetSetCharStatic(&f, self, m.Get(), test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001811 }
1812 break;
1813 case Primitive::Type::kPrimShort:
1814 if (test_type == type) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001815 GetSetShortStatic(&f, self, m.Get(), test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001816 }
1817 break;
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001818 case Primitive::Type::kPrimInt:
1819 if (test_type == type) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001820 GetSet32Static(&f, self, m.Get(), test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001821 }
1822 break;
1823
1824 case Primitive::Type::kPrimLong:
1825 if (test_type == type) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001826 GetSet64Static(&f, self, m.Get(), test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001827 }
1828 break;
1829
1830 case Primitive::Type::kPrimNot:
1831 // Don't try array.
Mathieu Chartier61c5ebc2014-06-05 17:42:53 -07001832 if (test_type == type && f->GetTypeDescriptor()[0] != '[') {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001833 GetSetObjStatic(&f, self, m.Get(), test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001834 }
1835 break;
1836
1837 default:
1838 break; // Skip.
1839 }
1840 }
1841 }
1842
1843 // Instance fields.
1844 {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001845 Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetIFields()));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001846 int32_t num_fields = fields->GetLength();
1847 for (int32_t i = 0; i < num_fields; ++i) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -08001848 StackHandleScope<1> hs2(self);
1849 Handle<mirror::ArtField> f(hs2.NewHandle(fields->Get(i)));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001850
Mathieu Chartier61c5ebc2014-06-05 17:42:53 -07001851 Primitive::Type type = f->GetTypeAsPrimitiveType();
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001852 switch (type) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001853 case Primitive::Type::kPrimBoolean:
1854 if (test_type == type) {
1855 GetSetBooleanInstance(&obj, &f, self, m.Get(), test);
1856 }
1857 break;
1858 case Primitive::Type::kPrimByte:
1859 if (test_type == type) {
1860 GetSetByteInstance(&obj, &f, self, m.Get(), test);
1861 }
1862 break;
1863 case Primitive::Type::kPrimChar:
1864 if (test_type == type) {
1865 GetSetCharInstance(&obj, &f, self, m.Get(), test);
1866 }
1867 break;
1868 case Primitive::Type::kPrimShort:
1869 if (test_type == type) {
1870 GetSetShortInstance(&obj, &f, self, m.Get(), test);
1871 }
1872 break;
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001873 case Primitive::Type::kPrimInt:
1874 if (test_type == type) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001875 GetSet32Instance(&obj, &f, self, m.Get(), test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001876 }
1877 break;
1878
1879 case Primitive::Type::kPrimLong:
1880 if (test_type == type) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001881 GetSet64Instance(&obj, &f, self, m.Get(), test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001882 }
1883 break;
1884
1885 case Primitive::Type::kPrimNot:
1886 // Don't try array.
Mathieu Chartier61c5ebc2014-06-05 17:42:53 -07001887 if (test_type == type && f->GetTypeDescriptor()[0] != '[') {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001888 GetSetObjInstance(&obj, &f, self, m.Get(), test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001889 }
1890 break;
1891
1892 default:
1893 break; // Skip.
1894 }
1895 }
1896 }
1897
1898 // TODO: Deallocate things.
1899}
1900
Fred Shih37f05ef2014-07-16 18:38:08 -07001901TEST_F(StubTest, Fields8) {
1902 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1903
1904 Thread* self = Thread::Current();
1905
1906 self->TransitionFromSuspendedToRunnable();
1907 LoadDex("AllFields");
1908 bool started = runtime_->Start();
1909 CHECK(started);
1910
1911 TestFields(self, this, Primitive::Type::kPrimBoolean);
1912 TestFields(self, this, Primitive::Type::kPrimByte);
1913}
1914
1915TEST_F(StubTest, Fields16) {
1916 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1917
1918 Thread* self = Thread::Current();
1919
1920 self->TransitionFromSuspendedToRunnable();
1921 LoadDex("AllFields");
1922 bool started = runtime_->Start();
1923 CHECK(started);
1924
1925 TestFields(self, this, Primitive::Type::kPrimChar);
1926 TestFields(self, this, Primitive::Type::kPrimShort);
1927}
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001928
1929TEST_F(StubTest, Fields32) {
1930 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1931
1932 Thread* self = Thread::Current();
1933
1934 self->TransitionFromSuspendedToRunnable();
1935 LoadDex("AllFields");
1936 bool started = runtime_->Start();
1937 CHECK(started);
1938
1939 TestFields(self, this, Primitive::Type::kPrimInt);
1940}
1941
1942TEST_F(StubTest, FieldsObj) {
1943 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1944
1945 Thread* self = Thread::Current();
1946
1947 self->TransitionFromSuspendedToRunnable();
1948 LoadDex("AllFields");
1949 bool started = runtime_->Start();
1950 CHECK(started);
1951
1952 TestFields(self, this, Primitive::Type::kPrimNot);
1953}
1954
1955TEST_F(StubTest, Fields64) {
1956 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1957
1958 Thread* self = Thread::Current();
1959
1960 self->TransitionFromSuspendedToRunnable();
1961 LoadDex("AllFields");
1962 bool started = runtime_->Start();
1963 CHECK(started);
1964
1965 TestFields(self, this, Primitive::Type::kPrimLong);
1966}
1967
Andreas Gampe51f76352014-05-21 08:28:48 -07001968TEST_F(StubTest, IMT) {
Ian Rogersc3ccc102014-06-25 11:52:14 -07001969#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe51f76352014-05-21 08:28:48 -07001970 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1971
1972 Thread* self = Thread::Current();
1973
1974 ScopedObjectAccess soa(self);
1975 StackHandleScope<7> hs(self);
1976
1977 JNIEnv* env = Thread::Current()->GetJniEnv();
1978
1979 // ArrayList
1980
1981 // Load ArrayList and used methods (JNI).
1982 jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
1983 ASSERT_NE(nullptr, arraylist_jclass);
1984 jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
1985 ASSERT_NE(nullptr, arraylist_constructor);
1986 jmethodID contains_jmethod = env->GetMethodID(arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
1987 ASSERT_NE(nullptr, contains_jmethod);
1988 jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
1989 ASSERT_NE(nullptr, add_jmethod);
1990
1991 // Get mirror representation.
1992 Handle<mirror::ArtMethod> contains_amethod(hs.NewHandle(soa.DecodeMethod(contains_jmethod)));
1993
1994 // Patch up ArrayList.contains.
1995 if (contains_amethod.Get()->GetEntryPointFromQuickCompiledCode() == nullptr) {
1996 contains_amethod.Get()->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
Andreas Gampe29b38412014-08-13 00:15:43 -07001997 StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
Andreas Gampe51f76352014-05-21 08:28:48 -07001998 }
1999
2000 // List
2001
2002 // Load List and used methods (JNI).
2003 jclass list_jclass = env->FindClass("java/util/List");
2004 ASSERT_NE(nullptr, list_jclass);
2005 jmethodID inf_contains_jmethod = env->GetMethodID(list_jclass, "contains", "(Ljava/lang/Object;)Z");
2006 ASSERT_NE(nullptr, inf_contains_jmethod);
2007
2008 // Get mirror representation.
2009 Handle<mirror::ArtMethod> inf_contains(hs.NewHandle(soa.DecodeMethod(inf_contains_jmethod)));
2010
2011 // Object
2012
2013 jclass obj_jclass = env->FindClass("java/lang/Object");
2014 ASSERT_NE(nullptr, obj_jclass);
2015 jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
2016 ASSERT_NE(nullptr, obj_constructor);
2017
Andreas Gampe51f76352014-05-21 08:28:48 -07002018 // Create instances.
2019
2020 jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
2021 ASSERT_NE(nullptr, jarray_list);
2022 Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object*>(jarray_list)));
2023
2024 jobject jobj = env->NewObject(obj_jclass, obj_constructor);
2025 ASSERT_NE(nullptr, jobj);
2026 Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(jobj)));
2027
Andreas Gampe1a7e2922014-05-21 15:37:53 -07002028 // Invocation tests.
2029
2030 // 1. imt_conflict
2031
2032 // Contains.
Andreas Gampe51f76352014-05-21 08:28:48 -07002033
2034 size_t result =
2035 Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
2036 reinterpret_cast<size_t>(obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -07002037 StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
Andreas Gampe51f76352014-05-21 08:28:48 -07002038 self, contains_amethod.Get(),
2039 static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));
2040
2041 ASSERT_FALSE(self->IsExceptionPending());
2042 EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
2043
2044 // Add object.
2045
2046 env->CallBooleanMethod(jarray_list, add_jmethod, jobj);
2047
2048 ASSERT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));
2049
Andreas Gampe1a7e2922014-05-21 15:37:53 -07002050 // Contains.
Andreas Gampe51f76352014-05-21 08:28:48 -07002051
2052 result = Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
2053 reinterpret_cast<size_t>(obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -07002054 StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
Andreas Gampe51f76352014-05-21 08:28:48 -07002055 self, contains_amethod.Get(),
2056 static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));
2057
2058 ASSERT_FALSE(self->IsExceptionPending());
2059 EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);
Andreas Gampe1a7e2922014-05-21 15:37:53 -07002060
2061 // 2. regular interface trampoline
2062
2063 result = Invoke3WithReferrer(static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()),
2064 reinterpret_cast<size_t>(array_list.Get()),
2065 reinterpret_cast<size_t>(obj.Get()),
2066 StubTest::GetEntrypoint(self,
2067 kQuickInvokeInterfaceTrampolineWithAccessCheck),
2068 self, contains_amethod.Get());
2069
2070 ASSERT_FALSE(self->IsExceptionPending());
2071 EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);
2072
2073 result = Invoke3WithReferrer(static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()),
2074 reinterpret_cast<size_t>(array_list.Get()),
2075 reinterpret_cast<size_t>(array_list.Get()),
2076 StubTest::GetEntrypoint(self,
2077 kQuickInvokeInterfaceTrampolineWithAccessCheck),
2078 self, contains_amethod.Get());
2079
2080 ASSERT_FALSE(self->IsExceptionPending());
2081 EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
Andreas Gampe51f76352014-05-21 08:28:48 -07002082#else
Andreas Gampe6aac3552014-06-09 14:55:53 -07002083 LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
Andreas Gampe51f76352014-05-21 08:28:48 -07002084 // Force-print to std::cout so it's also outside the logcat.
Andreas Gampe6aac3552014-06-09 14:55:53 -07002085 std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
2086#endif
2087}
2088
Andreas Gampe6aac3552014-06-09 14:55:53 -07002089TEST_F(StubTest, StringIndexOf) {
2090#if defined(__arm__) || defined(__aarch64__)
Hiroshi Yamauchi52fa8142014-06-16 12:59:49 -07002091 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
2092
Andreas Gampe6aac3552014-06-09 14:55:53 -07002093 Thread* self = Thread::Current();
2094 ScopedObjectAccess soa(self);
2095 // garbage is created during ClassLinker::Init
2096
2097 // Create some strings
2098 // Use array so we can index into it and use a matrix for expected results
2099 // Setup: The first half is standard. The second half uses a non-zero offset.
2100 // TODO: Shared backing arrays.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002101 const char* c_str[] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
2102 static constexpr size_t kStringCount = arraysize(c_str);
2103 const char c_char[] = { 'a', 'b', 'c', 'd', 'e' };
2104 static constexpr size_t kCharCount = arraysize(c_char);
Andreas Gampe6aac3552014-06-09 14:55:53 -07002105
2106 StackHandleScope<kStringCount> hs(self);
2107 Handle<mirror::String> s[kStringCount];
2108
2109 for (size_t i = 0; i < kStringCount; ++i) {
2110 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
2111 }
2112
2113 // Matrix of expectations. First component is first parameter. Note we only check against the
2114 // sign, not the value. As we are testing random offsets, we need to compute this and need to
2115 // rely on String::CompareTo being correct.
2116 static constexpr size_t kMaxLen = 9;
2117 DCHECK_LE(strlen(c_str[kStringCount-1]), kMaxLen) << "Please fix the indexof test.";
2118
2119 // Last dimension: start, offset by 1.
2120 int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
2121 for (size_t x = 0; x < kStringCount; ++x) {
2122 for (size_t y = 0; y < kCharCount; ++y) {
2123 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2124 expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
2125 }
2126 }
2127 }
2128
2129 // Play with it...
2130
2131 for (size_t x = 0; x < kStringCount; ++x) {
2132 for (size_t y = 0; y < kCharCount; ++y) {
2133 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2134 int32_t start = static_cast<int32_t>(z) - 1;
2135
2136 // Test string_compareto x y
2137 size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
Andreas Gampe29b38412014-08-13 00:15:43 -07002138 StubTest::GetEntrypoint(self, kQuickIndexOf), self);
Andreas Gampe6aac3552014-06-09 14:55:53 -07002139
2140 EXPECT_FALSE(self->IsExceptionPending());
2141
2142 // The result is a 32b signed integer
2143 union {
2144 size_t r;
2145 int32_t i;
2146 } conv;
2147 conv.r = result;
2148
2149 EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
2150 c_char[y] << " @ " << start;
2151 }
2152 }
2153 }
2154
2155 // TODO: Deallocate things.
2156
2157 // Tests done.
2158#else
2159 LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
2160 // Force-print to std::cout so it's also outside the logcat.
2161 std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe51f76352014-05-21 08:28:48 -07002162#endif
2163}
2164
Andreas Gampe525cde22014-04-22 15:44:50 -07002165} // namespace art