blob: 0769687716bf1111addc0fc8cfe39a6089dbcf08 [file] [log] [blame]
Andreas Gampe525cde22014-04-22 15:44:50 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Ian Rogerse63db272014-07-15 15:36:11 -070017#include <cstdio>
18
Andreas Gampe525cde22014-04-22 15:44:50 -070019#include "common_runtime_test.h"
Andreas Gampe29b38412014-08-13 00:15:43 -070020#include "entrypoints/quick/quick_entrypoints_enum.h"
Andreas Gampe6e4e59c2014-05-05 20:11:02 -070021#include "mirror/art_field-inl.h"
Andreas Gampe51f76352014-05-21 08:28:48 -070022#include "mirror/art_method-inl.h"
23#include "mirror/class-inl.h"
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -070024#include "mirror/string-inl.h"
Ian Rogerse63db272014-07-15 15:36:11 -070025#include "scoped_thread_state_change.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070026
27namespace art {
28
29
// Test fixture for exercising the quick-code assembly stubs directly.
// It sets up a runtime with callee-save methods (needed for exception
// delivery out of the stubs) and provides per-architecture trampolines
// (Invoke3*) that call a stub address with up to three word-sized
// arguments while emulating enough of the managed calling convention
// (referrer slot on the stack, self in the right register).
class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods for every CalleeSaveType that does not
      // have one yet; stubs that throw will walk these frames.
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(), type);
        }
      }
    }
  }

  // Shrink the heap (these tests allocate very little) and force the
  // interpreter so no compiled code interferes with the stubs under test.
  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  // Convenience wrapper: invoke a stub with no referrer method.
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // Invoke the stub at |code| with three arguments, pushing |referrer|
  // where the quick ABI expects the calling ArtMethod*. Returns the stub's
  // result register value. Also verifies (on aarch64) that the stub
  // preserved the callee-save FP registers d8-d15; the outcome is stored
  // in fp_result and checked via EXPECT_EQ below.
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             mirror::ArtMethod* referrer) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "subl $12, %%esp\n\t"       // Align stack.
        "pushl %[referrer]\n\t"     // Store referrer.
        "call *%%edi\n\t"           // Call the stub
        "addl $16, %%esp"           // Pop referrer and alignment padding
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #20\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"     // self goes in r9
        "add sp, sp, #20\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop referrer and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"       // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t" // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "str %[self], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        // with a recognizable pattern so we can later detect whether the
        // stub clobbered these callee-save FP registers.
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers (self goes in x18).
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldr x18, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"


        "blr x3\n\t"                // Call the stub
        "mov x8, x0\n\t"            // Store result
        "add sp, sp, #16\n\t"       // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"            // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"      // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"       // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t" // Store the FPR comparison result
        "mov %[result], x8\n\t"     // Store the call result

        "b 3f\n\t"                  // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                  // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__) && defined(__clang__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"              // Call the stub
        "addq $16, %%rsp\n\t"          // Pop referrer and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "c"(referrer)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    UNUSED(arg0, arg1, arg2, code, referrer);
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    // Publish and check the FPR-preservation verdict (0 == preserved).
    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Like Invoke3WithReferrer, but additionally passes a "hidden" argument
  // in the register the quick ABI reserves for it (xmm7 on x86, r12 on
  // arm, x17 on arm64, rax on x86-64). Used by stubs such as the IMT
  // conflict trampoline.
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, mirror::ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "movd %[hidden], %%xmm7\n\t"   // Pass the hidden arg in xmm7.
        "subl $12, %%esp\n\t"          // Align stack.
        "pushl %[referrer]\n\t"        // Store referrer
        "call *%%edi\n\t"              // Call the stub
        "addl $16, %%esp"              // Pop referrer and alignment padding
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer), [hidden]"m"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"     // self goes in r9
        "ldr r12, [sp, #20]\n\t"    // hidden arg goes in r12
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop referrer and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"       // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t" // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        // with a recognizable pattern so we can later detect whether the
        // stub clobbered these callee-save FP registers.
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers (self in x18, hidden in x17).
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x18, x17, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"                // Call the stub
        "mov x8, x0\n\t"            // Store result
        "add sp, sp, #16\n\t"       // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"            // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"      // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"       // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t" // Store the FPR comparison result
        "mov %[result], x8\n\t"     // Store the call result

        "b 3f\n\t"                  // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                  // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__) && defined(__clang__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rbx\n\t"              // Call the stub
        "addq $16, %%rsp\n\t"          // Pop referrer and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "b"(code), [referrer] "c"(referrer), [hidden] "a"(hidden)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    UNUSED(arg0, arg1, arg2, code, referrer, hidden);
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    // Publish and check the FPR-preservation verdict (0 == preserved).
    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Method with 32b arg0, 64b arg1. On 64-bit targets arg1 fits in one
  // register; on 32-bit targets it is split into two word-sized halves.
  size_t Invoke3UWithReferrer(size_t arg0, uint64_t arg1, uintptr_t code, Thread* self,
                              mirror::ArtMethod* referrer) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, 0U, code, self, referrer);
#else
    // Need to split up arguments.
    uint32_t lower = static_cast<uint32_t>(arg1 & 0xFFFFFFFF);
    uint32_t upper = static_cast<uint32_t>((arg1 >> 32) & 0xFFFFFFFF);

    return Invoke3WithReferrer(arg0, lower, upper, code, self, referrer);
#endif
  }

  // Read the address of a quick entrypoint out of |self|'s TLS entrypoint
  // table, using the pointer-size-appropriate thread offset.
  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
    int32_t offset;
#ifdef __LP64__
    offset = GetThreadOffset<8>(entrypoint).Int32Value();
#else
    offset = GetThreadOffset<4>(entrypoint).Int32Value();
#endif
    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
  }

 protected:
  // Result of the last FPR-preservation check (0 == d8-d15 preserved);
  // only computed by the aarch64 paths, stays 0 elsewhere.
  size_t fp_result;
};
548
549
Andreas Gampe525cde22014-04-22 15:44:50 -0700550TEST_F(StubTest, Memcpy) {
Ian Rogersc3ccc102014-06-25 11:52:14 -0700551#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700552 Thread* self = Thread::Current();
553
554 uint32_t orig[20];
555 uint32_t trg[20];
556 for (size_t i = 0; i < 20; ++i) {
557 orig[i] = i;
558 trg[i] = 0;
559 }
560
561 Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
Andreas Gampe29b38412014-08-13 00:15:43 -0700562 10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700563
564 EXPECT_EQ(orig[0], trg[0]);
565
566 for (size_t i = 1; i < 4; ++i) {
567 EXPECT_NE(orig[i], trg[i]);
568 }
569
570 for (size_t i = 4; i < 14; ++i) {
571 EXPECT_EQ(orig[i], trg[i]);
572 }
573
574 for (size_t i = 14; i < 20; ++i) {
575 EXPECT_NE(orig[i], trg[i]);
576 }
577
578 // TODO: Test overlapping?
579
580#else
581 LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
582 // Force-print to std::cout so it's also outside the logcat.
583 std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
584#endif
585}
586
Andreas Gampe525cde22014-04-22 15:44:50 -0700587TEST_F(StubTest, LockObject) {
Ian Rogersc3ccc102014-06-25 11:52:14 -0700588#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700589 static constexpr size_t kThinLockLoops = 100;
590
Andreas Gampe525cde22014-04-22 15:44:50 -0700591 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700592
593 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
594
Andreas Gampe525cde22014-04-22 15:44:50 -0700595 // Create an object
596 ScopedObjectAccess soa(self);
597 // garbage is created during ClassLinker::Init
598
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700599 StackHandleScope<2> hs(soa.Self());
600 Handle<mirror::String> obj(
601 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700602 LockWord lock = obj->GetLockWord(false);
603 LockWord::LockState old_state = lock.GetState();
604 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
605
Andreas Gampe29b38412014-08-13 00:15:43 -0700606 Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700607
608 LockWord lock_after = obj->GetLockWord(false);
609 LockWord::LockState new_state = lock_after.GetState();
610 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700611 EXPECT_EQ(lock_after.ThinLockCount(), 0U); // Thin lock starts count at zero
612
613 for (size_t i = 1; i < kThinLockLoops; ++i) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700614 Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700615
616 // Check we're at lock count i
617
618 LockWord l_inc = obj->GetLockWord(false);
619 LockWord::LockState l_inc_state = l_inc.GetState();
620 EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
621 EXPECT_EQ(l_inc.ThinLockCount(), i);
622 }
623
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700624 // Force a fat lock by running identity hashcode to fill up lock word.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700625 Handle<mirror::String> obj2(hs.NewHandle(
626 mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700627
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700628 obj2->IdentityHashCode();
629
Andreas Gampe29b38412014-08-13 00:15:43 -0700630 Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700631
632 LockWord lock_after2 = obj2->GetLockWord(false);
633 LockWord::LockState new_state2 = lock_after2.GetState();
634 EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
635 EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));
636
637 // Test done.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700638#else
639 LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
640 // Force-print to std::cout so it's also outside the logcat.
641 std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
642#endif
643}
644
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700645
// Deterministic pseudo-random sequence for the lock stress test below.
// A small Lehmer-style generator: multiply by 48271 (wrapping mod 2^32),
// reduce mod the Mersenne prime 2^31-1, then offset by 13. Not intended
// to have any statistical quality — only to be reproducible.
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  // Advance the state and return the new value.
  uint32_t next() {
    const uint32_t product = val_ * 48271;  // Intentional uint32_t wraparound.
    val_ = product % 2147483647 + 13;
    return val_;
  }

  uint32_t val_;  // Current state; public so tests could reseed/inspect.
};
657
658
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700659// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
660static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
Ian Rogersc3ccc102014-06-25 11:52:14 -0700661#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700662 static constexpr size_t kThinLockLoops = 100;
663
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700664 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700665
666 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
667 const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700668 // Create an object
669 ScopedObjectAccess soa(self);
670 // garbage is created during ClassLinker::Init
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700671 static constexpr size_t kNumberOfLocks = 10; // Number of objects = lock
672 StackHandleScope<kNumberOfLocks + 1> hs(self);
673 Handle<mirror::String> obj(
674 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700675 LockWord lock = obj->GetLockWord(false);
676 LockWord::LockState old_state = lock.GetState();
677 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
678
Andreas Gampe29b38412014-08-13 00:15:43 -0700679 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700680 // This should be an illegal monitor state.
681 EXPECT_TRUE(self->IsExceptionPending());
682 self->ClearException();
683
684 LockWord lock_after = obj->GetLockWord(false);
685 LockWord::LockState new_state = lock_after.GetState();
686 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);
Andreas Gampe525cde22014-04-22 15:44:50 -0700687
Andreas Gampe29b38412014-08-13 00:15:43 -0700688 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700689
690 LockWord lock_after2 = obj->GetLockWord(false);
691 LockWord::LockState new_state2 = lock_after2.GetState();
692 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);
693
Andreas Gampe29b38412014-08-13 00:15:43 -0700694 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700695
696 LockWord lock_after3 = obj->GetLockWord(false);
697 LockWord::LockState new_state3 = lock_after3.GetState();
698 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);
699
700 // Stress test:
701 // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
702 // each step.
703
704 RandGen r(0x1234);
705
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700706 constexpr size_t kIterations = 10000; // Number of iterations
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700707 constexpr size_t kMoveToFat = 1000; // Chance of 1:kMoveFat to make a lock fat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700708
709 size_t counts[kNumberOfLocks];
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700710 bool fat[kNumberOfLocks]; // Whether a lock should be thin or fat.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700711 Handle<mirror::String> objects[kNumberOfLocks];
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700712
713 // Initialize = allocate.
714 for (size_t i = 0; i < kNumberOfLocks; ++i) {
715 counts[i] = 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700716 fat[i] = false;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700717 objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700718 }
719
720 for (size_t i = 0; i < kIterations; ++i) {
721 // Select which lock to update.
722 size_t index = r.next() % kNumberOfLocks;
723
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700724 // Make lock fat?
725 if (!fat[index] && (r.next() % kMoveToFat == 0)) {
726 fat[index] = true;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700727 objects[index]->IdentityHashCode();
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700728
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700729 LockWord lock_iter = objects[index]->GetLockWord(false);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700730 LockWord::LockState iter_state = lock_iter.GetState();
731 if (counts[index] == 0) {
732 EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
733 } else {
734 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
735 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700736 } else {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800737 bool take_lock; // Whether to lock or unlock in this step.
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700738 if (counts[index] == 0) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800739 take_lock = true;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700740 } else if (counts[index] == kThinLockLoops) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800741 take_lock = false;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700742 } else {
743 // Randomly.
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800744 take_lock = r.next() % 2 == 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700745 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700746
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800747 if (take_lock) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700748 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
749 self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700750 counts[index]++;
751 } else {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700752 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700753 art_quick_unlock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700754 counts[index]--;
755 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700756
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700757 EXPECT_FALSE(self->IsExceptionPending());
758
759 // Check the new state.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700760 LockWord lock_iter = objects[index]->GetLockWord(true);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700761 LockWord::LockState iter_state = lock_iter.GetState();
762 if (fat[index]) {
763 // Abuse MonitorInfo.
764 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700765 MonitorInfo info(objects[index].Get());
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700766 EXPECT_EQ(counts[index], info.entry_count_) << index;
767 } else {
768 if (counts[index] > 0) {
769 EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
770 EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
771 } else {
772 EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
773 }
774 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700775 }
776 }
777
778 // Unlock the remaining count times and then check it's unlocked. Then deallocate.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700779 // Go reverse order to correctly handle Handles.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700780 for (size_t i = 0; i < kNumberOfLocks; ++i) {
781 size_t index = kNumberOfLocks - 1 - i;
782 size_t count = counts[index];
783 while (count > 0) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700784 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
785 self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700786 count--;
787 }
788
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700789 LockWord lock_after4 = objects[index]->GetLockWord(false);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700790 LockWord::LockState new_state4 = lock_after4.GetState();
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700791 EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
792 || LockWord::LockState::kFatLocked == new_state4);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700793 }
794
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700795 // Test done.
Andreas Gampe525cde22014-04-22 15:44:50 -0700796#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -0800797 UNUSED(test);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700798 LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
Andreas Gampe525cde22014-04-22 15:44:50 -0700799 // Force-print to std::cout so it's also outside the logcat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700800 std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe525cde22014-04-22 15:44:50 -0700801#endif
802}
803
// Exercises the art_quick_unlock_object stub (including unlocking an unlocked
// object and thin/fat-lock stress); the shared logic lives in TestUnlockObject().
TEST_F(StubTest, UnlockObject) {
  // This will lead to monitor error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  TestUnlockObject(this);
}
Andreas Gampe525cde22014-04-22 15:44:50 -0700810
Ian Rogersc3ccc102014-06-25 11:52:14 -0700811#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700812extern "C" void art_quick_check_cast(void);
813#endif
814
815TEST_F(StubTest, CheckCast) {
Ian Rogersc3ccc102014-06-25 11:52:14 -0700816#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700817 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700818
819 const uintptr_t art_quick_check_cast = StubTest::GetEntrypoint(self, kQuickCheckCast);
820
Andreas Gampe525cde22014-04-22 15:44:50 -0700821 // Find some classes.
822 ScopedObjectAccess soa(self);
823 // garbage is created during ClassLinker::Init
824
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700825 StackHandleScope<2> hs(soa.Self());
826 Handle<mirror::Class> c(
827 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
828 Handle<mirror::Class> c2(
829 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700830
831 EXPECT_FALSE(self->IsExceptionPending());
832
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700833 Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700834 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700835
836 EXPECT_FALSE(self->IsExceptionPending());
837
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700838 Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700839 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700840
841 EXPECT_FALSE(self->IsExceptionPending());
842
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700843 Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700844 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700845
846 EXPECT_FALSE(self->IsExceptionPending());
847
848 // TODO: Make the following work. But that would require correct managed frames.
849
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700850 Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700851 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700852
853 EXPECT_TRUE(self->IsExceptionPending());
854 self->ClearException();
855
856#else
857 LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
858 // Force-print to std::cout so it's also outside the logcat.
859 std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
860#endif
861}
862
863
Andreas Gampe525cde22014-04-22 15:44:50 -0700864TEST_F(StubTest, APutObj) {
Hiroshi Yamauchid6881ae2014-04-28 17:21:48 -0700865 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
866
Ian Rogersc3ccc102014-06-25 11:52:14 -0700867#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700868 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700869
870 // Do not check non-checked ones, we'd need handlers and stuff...
871 const uintptr_t art_quick_aput_obj_with_null_and_bound_check =
872 StubTest::GetEntrypoint(self, kQuickAputObjectWithNullAndBoundCheck);
873
Andreas Gampe525cde22014-04-22 15:44:50 -0700874 // Create an object
875 ScopedObjectAccess soa(self);
876 // garbage is created during ClassLinker::Init
877
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700878 StackHandleScope<5> hs(soa.Self());
879 Handle<mirror::Class> c(
880 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
881 Handle<mirror::Class> ca(
882 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700883
884 // Build a string array of size 1
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700885 Handle<mirror::ObjectArray<mirror::Object>> array(
886 hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));
Andreas Gampe525cde22014-04-22 15:44:50 -0700887
888 // Build a string -> should be assignable
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700889 Handle<mirror::String> str_obj(
890 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700891
892 // Build a generic object -> should fail assigning
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700893 Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));
Andreas Gampe525cde22014-04-22 15:44:50 -0700894
895 // Play with it...
896
897 // 1) Success cases
Andreas Gampef4e910b2014-04-29 16:55:52 -0700898 // 1.1) Assign str_obj to array[0..3]
Andreas Gampe525cde22014-04-22 15:44:50 -0700899
900 EXPECT_FALSE(self->IsExceptionPending());
901
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700902 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700903 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700904
905 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700906 EXPECT_EQ(str_obj.Get(), array->Get(0));
Andreas Gampe525cde22014-04-22 15:44:50 -0700907
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700908 Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700909 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700910
911 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700912 EXPECT_EQ(str_obj.Get(), array->Get(1));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700913
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700914 Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700915 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700916
917 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700918 EXPECT_EQ(str_obj.Get(), array->Get(2));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700919
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700920 Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700921 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700922
923 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700924 EXPECT_EQ(str_obj.Get(), array->Get(3));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700925
926 // 1.2) Assign null to array[0..3]
Andreas Gampe525cde22014-04-22 15:44:50 -0700927
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700928 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700929 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700930
931 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampef4e910b2014-04-29 16:55:52 -0700932 EXPECT_EQ(nullptr, array->Get(0));
933
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700934 Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700935 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700936
937 EXPECT_FALSE(self->IsExceptionPending());
938 EXPECT_EQ(nullptr, array->Get(1));
939
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700940 Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700941 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700942
943 EXPECT_FALSE(self->IsExceptionPending());
944 EXPECT_EQ(nullptr, array->Get(2));
945
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700946 Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700947 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700948
949 EXPECT_FALSE(self->IsExceptionPending());
950 EXPECT_EQ(nullptr, array->Get(3));
Andreas Gampe525cde22014-04-22 15:44:50 -0700951
952 // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.
953
954 // 2) Failure cases (str into str[])
955 // 2.1) Array = null
956 // TODO: Throwing NPE needs actual DEX code
957
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700958// Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe525cde22014-04-22 15:44:50 -0700959// reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
960//
961// EXPECT_TRUE(self->IsExceptionPending());
962// self->ClearException();
963
964 // 2.2) Index < 0
965
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700966 Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
967 reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700968 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700969
970 EXPECT_TRUE(self->IsExceptionPending());
971 self->ClearException();
972
973 // 2.3) Index > 0
974
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700975 Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700976 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700977
978 EXPECT_TRUE(self->IsExceptionPending());
979 self->ClearException();
980
981 // 3) Failure cases (obj into str[])
982
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700983 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700984 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700985
986 EXPECT_TRUE(self->IsExceptionPending());
987 self->ClearException();
988
989 // Tests done.
990#else
991 LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
992 // Force-print to std::cout so it's also outside the logcat.
993 std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
994#endif
995}
996
// Exercises the three quick object-allocation entrypoints (unresolved-style,
// resolved, initialized) for java.lang.Object, then forces an out-of-memory
// condition and verifies the stub reports it via a pending exception.
TEST_F(StubTest, AllocObject) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // kQuickAllocObject takes a dex type index plus a referrer method.
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            reinterpret_cast<size_t>(c->GetVirtualMethod(0)),  // arbitrary
                            0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObject),
                            self);

    // A non-null result of the correct class, with no exception, means success.
    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // kQuickAllocObjectResolved takes the class itself as the first argument.
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // Same shape for kQuickAllocObjectInitialized.
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
    // Cap the heap so that exhausting it below terminates in reasonable time.
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K
    size_t length = 128 * KB / 4;
    // Fill the heap with object arrays, shrinking the request size each time an
    // allocation fails, until only tiny requests would succeed.
    while (length > 10) {
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h.Get() == nullptr) {
        self->ClearException();

        // Try a smaller length
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h.Get() != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    // With the heap full, the stub must fail: null result plus pending exception.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1121
// Exercises the quick array-allocation entrypoints for Object[]: a successful
// resolved allocation of length 10, and an intentionally-oversized request
// that must fail with a pending exception.
TEST_F(StubTest, AllocObjectArray) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(self);
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

  // Needed to have a linked method.
  Handle<mirror::Class> c_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());

  // Deliberately disabled block (kept for documentation):
  // For some reason this does not work, as the type_idx is artificial and outside what the
  // resolved types of c_obj allow...

  if ((false)) {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            10U,
                            reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0)),  // arbitrary
                            StubTest::GetEntrypoint(self, kQuickAllocArray),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    EXPECT_EQ(obj->GetLength(), 10);
  }

  {
    // Resolved case: class pointer and length are passed directly.
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 10U,
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    // The result must be a verifiable Object[] of the requested length.
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory.
  {
    // A GB-element array request cannot be satisfied; expect null + exception.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()),
                            GB,  // that should fail...
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1207
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001208
// Exercises the art_quick_string_compareto stub against String::CompareTo for
// every ordered pair of test strings, including strings whose backing-array
// offset/count fields were rewritten to non-default values.
TEST_F(StubTest, StringCompareTo) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();

  const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);

  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  const char* c[] = { "", "", "a", "aa", "ab",
      "aacaacaacaacaacaac",  // This one's under the default limit to go to __memcmp16.
      "aacaacaacaacaacaacaacaacaacaacaacaac",  // This one's over.
      "aacaacaacaacaacaacaacaacaacaacaacaaca" };  // As is this one. We need a separate one to
                                                  // defeat object-equal optimizations.
  static constexpr size_t kBaseStringCount  = arraysize(c);
  static constexpr size_t kStringCount = 2 * kBaseStringCount;

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  // First half: plain copies of the test data.
  for (size_t i = 0; i < kBaseStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
  }

  RandGen r(0x1234);

  // Second half: same contents, but with randomized offset/count fields so the
  // stub is tested on non-zero-offset strings.
  for (size_t i = kBaseStringCount; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i - kBaseStringCount]));
    int32_t length = s[i]->GetLength();
    if (length > 1) {
      // Set a random offset and length.
      int32_t new_offset = 1 + (r.next() % (length - 1));
      int32_t rest = length - new_offset - 1;
      int32_t new_length = 1 + (rest > 0 ? r.next() % rest : 0);

      // Poke the fields directly; new_length stays within the original backing array.
      s[i]->SetField32<false>(mirror::String::CountOffset(), new_length);
      s[i]->SetField32<false>(mirror::String::OffsetOffset(), new_offset);
    }
  }

  // TODO: wide characters

  // Matrix of expectations. First component is first parameter. Note we only check against the
  // sign, not the value. As we are testing random offsets, we need to compute this and need to
  // rely on String::CompareTo being correct.
  int32_t expected[kStringCount][kStringCount];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      expected[x][y] = s[x]->CompareTo(s[y].Get());
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      // Test string_compareto x y
      size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
                              reinterpret_cast<size_t>(s[y].Get()), 0U,
                              art_quick_string_compareto, self);

      EXPECT_FALSE(self->IsExceptionPending());

      // The result is a 32b signed integer
      // (reinterpret the low 32 bits of the size_t return value).
      union {
        size_t r;
        int32_t i;
      } conv;
      conv.r = result;
      int32_t e = expected[x][y];
      // Only the sign of the stub's result must agree with String::CompareTo.
      EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
      std::endl;
#endif
}
1306
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001307
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001308static void GetSetBooleanStatic(Handle<mirror::ArtField>* f, Thread* self,
1309 mirror::ArtMethod* referrer, StubTest* test)
Fred Shih37f05ef2014-07-16 18:38:08 -07001310 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1311#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
1312 constexpr size_t num_values = 5;
1313 uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };
1314
1315 for (size_t i = 0; i < num_values; ++i) {
1316 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1317 static_cast<size_t>(values[i]),
1318 0U,
1319 StubTest::GetEntrypoint(self, kQuickSet8Static),
1320 self,
1321 referrer);
1322
1323 size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1324 0U, 0U,
1325 StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
1326 self,
1327 referrer);
1328 // Boolean currently stores bools as uint8_t, be more zealous about asserting correct writes/gets.
1329 EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
1330 }
1331#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001332 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001333 LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
1334 // Force-print to std::cout so it's also outside the logcat.
1335 std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1336#endif
1337}
// Exercises the Set8Static/GetByteStatic quick entrypoints on a static byte
// field: stores each test value through the stub, reads it back through the
// stub, and checks the (sign-extended) int8_t value round-trips.
static void GetSetByteStatic(Handle<mirror::ArtField>* f, Thread* self,
                             mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // Boundary and interior values for a signed 8-bit field.
  int8_t values[] = { -128, -64, 0, 64, 127 };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write values[i] into the static field via the quick stub.
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet8Static),
                              self,
                              referrer);

    // Read it back via the quick stub; the result comes back in a size_t.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetByteStatic),
                                           self,
                                           referrer);
    EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1366
1367
Fred Shih37f05ef2014-07-16 18:38:08 -07001368static void GetSetBooleanInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001369 Thread* self, mirror::ArtMethod* referrer, StubTest* test)
Fred Shih37f05ef2014-07-16 18:38:08 -07001370 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1371#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001372 uint8_t values[] = { 0, true, 2, 128, 0xFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001373
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001374 for (size_t i = 0; i < arraysize(values); ++i) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001375 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1376 reinterpret_cast<size_t>(obj->Get()),
1377 static_cast<size_t>(values[i]),
1378 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1379 self,
1380 referrer);
1381
1382 uint8_t res = f->Get()->GetBoolean(obj->Get());
1383 EXPECT_EQ(values[i], res) << "Iteration " << i;
1384
1385 f->Get()->SetBoolean<false>(obj->Get(), res);
1386
1387 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1388 reinterpret_cast<size_t>(obj->Get()),
1389 0U,
1390 StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
1391 self,
1392 referrer);
1393 EXPECT_EQ(res, static_cast<uint8_t>(res2));
1394 }
1395#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001396 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001397 LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
1398 // Force-print to std::cout so it's also outside the logcat.
1399 std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1400#endif
1401}
// Exercises the Set8Instance/GetByteInstance quick entrypoints on an instance
// byte field: writes via the stub, cross-checks through ArtField::GetByte,
// then bumps the value directly so the stub get must observe a fresh read.
static void GetSetByteInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                               Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // Boundary and interior values for a signed 8-bit field.
  int8_t values[] = { -128, -64, 0, 64, 127 };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write values[i] into obj's field via the quick stub.
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet8Instance),
                              self,
                              referrer);

    // Verify the stub's write through the runtime's own accessor.
    int8_t res = f->Get()->GetByte(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    // Change the field directly; the stub get below must see the new value.
    f->Get()->SetByte<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetByteInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int8_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1435
// Exercises the Set16Static/GetCharStatic quick entrypoints on a static char
// field: stores each test value through the stub, reads it back through the
// stub, and checks the (zero-extended) uint16_t value round-trips.
static void GetSetCharStatic(Handle<mirror::ArtField>* f, Thread* self, mirror::ArtMethod* referrer,
                             StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // Boundary and interior values for an unsigned 16-bit field.
  uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write values[i] into the static field via the quick stub.
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet16Static),
                              self,
                              referrer);

    // Read it back via the quick stub; the result comes back in a size_t.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetCharStatic),
                                           self,
                                           referrer);

    EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
// Exercises the Set16Static/GetShortStatic quick entrypoints on a static short
// field: stores each test value through the stub, reads it back through the
// stub, and checks the (sign-extended) int16_t value round-trips.
static void GetSetShortStatic(Handle<mirror::ArtField>* f, Thread* self,
                              mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // Boundary and interior values for a signed 16-bit field.
  int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write values[i] into the static field via the quick stub.
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet16Static),
                              self,
                              referrer);

    // Read it back via the quick stub; the result comes back in a size_t.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetShortStatic),
                                           self,
                                           referrer);

    EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1494
// Exercises the Set16Instance/GetCharInstance quick entrypoints on an instance
// char field: writes via the stub, cross-checks through ArtField::GetChar,
// then bumps the value directly so the stub get must observe a fresh read.
static void GetSetCharInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                               Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // Boundary and interior values for an unsigned 16-bit field.
  uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write values[i] into obj's field via the quick stub.
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    // Verify the stub's write through the runtime's own accessor.
    uint16_t res = f->Get()->GetChar(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    // Change the field directly; the stub get below must see the new value.
    f->Get()->SetChar<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetCharInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint16_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
// Exercises the Set16Instance/GetShortInstance quick entrypoints on an
// instance short field: writes via the stub, cross-checks through
// ArtField::GetShort, then bumps the value directly before the stub get.
static void GetSetShortInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                                Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // Boundary and interior values for a signed 16-bit field.
  int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write values[i] into obj's field via the quick stub.
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    // Verify the stub's write through the runtime's own accessor.
    int16_t res = f->Get()->GetShort(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    // Change the field directly; the stub get below must see the new value.
    f->Get()->SetShort<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetShortInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int16_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1561
// Exercises the Set32Static/Get32Static quick entrypoints on a static int
// field: stores each test value through the stub and reads it back through
// the stub, expecting an exact round-trip.
static void GetSet32Static(Handle<mirror::ArtField>* f, Thread* self, mirror::ArtMethod* referrer,
                           StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // Values covering zero, small numbers and the unsigned 32-bit boundary.
  uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write values[i] into the static field via the quick stub.
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet32Static),
                              self,
                              referrer);

    // Read it back via the quick stub; the result comes back in a size_t.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet32Static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1591
1592
// Exercises the Set32Instance/Get32Instance quick entrypoints on an instance
// int field: writes via the stub, cross-checks through ArtField::GetInt, then
// bumps the value directly so the stub get must observe a fresh read.
static void GetSet32Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // Values covering zero, small numbers and the unsigned 32-bit boundary.
  uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write values[i] into obj's field via the quick stub.
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet32Instance),
                              self,
                              referrer);

    // Verify the stub's write through the runtime's own accessor.
    int32_t res = f->Get()->GetInt(obj->Get());
    EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;

    // Change the field directly; the stub get below must see the new value.
    res++;
    f->Get()->SetInt<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGet32Instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int32_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1628
1629
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))

// Helper for GetSetObjStatic: stores 'val' into the static reference field
// with dex field index 'f_idx' via the SetObjStatic stub, reads it back via
// the GetObjStatic stub, and expects the identical reference.
static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
                                 mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // Store the reference through the quick stub.
  test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                            reinterpret_cast<size_t>(val),
                            0U,
                            StubTest::GetEntrypoint(self, kQuickSetObjStatic),
                            self,
                            referrer);

  // Load it back through the quick stub.
  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                                         0U, 0U,
                                         StubTest::GetEntrypoint(self, kQuickGetObjStatic),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
}
#endif
1651
// Exercises the SetObjStatic/GetObjStatic quick entrypoints on a static
// reference field: round-trips null, then a freshly allocated string, then
// null again (so the field does not keep the string alive afterwards).
static void GetSetObjStatic(Handle<mirror::ArtField>* f, Thread* self, mirror::ArtMethod* referrer,
                            StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_static((*f)->GetDexFieldIndex(), str, self, referrer, test);

  // Reset to null so the field does not retain the test string.
  set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1670
1671
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
// Helper for GetSetObjInstance: stores 'val' into the reference field 'f' of
// object 'trg' via the SetObjInstance stub, reads it back via the
// GetObjInstance stub, and cross-checks with ArtField::GetObj.
static void set_and_check_instance(Handle<mirror::ArtField>* f, mirror::Object* trg,
                                   mirror::Object* val, Thread* self, mirror::ArtMethod* referrer,
                                   StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // Store the reference through the quick stub.
  test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                            reinterpret_cast<size_t>(trg),
                            reinterpret_cast<size_t>(val),
                            StubTest::GetEntrypoint(self, kQuickSetObjInstance),
                            self,
                            referrer);

  // Load it back through the quick stub.
  size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                         reinterpret_cast<size_t>(trg),
                                         0U,
                                         StubTest::GetEntrypoint(self, kQuickGetObjInstance),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;

  // Also verify through the runtime's own accessor.
  EXPECT_EQ(val, f->Get()->GetObj(trg));
}
#endif
1696
// Exercises the SetObjInstance/GetObjInstance quick entrypoints on an instance
// reference field: round-trips null, then a freshly allocated string, then
// null again (so the field does not keep the string alive afterwards).
static void GetSetObjInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                              Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_instance(f, obj->Get(), str, self, referrer, test);

  // Reset to null so the field does not retain the test string.
  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1715
1716
1717// TODO: Complete these tests for 32b architectures.
1718
// Exercises the Set64Static/Get64Static quick entrypoints on a static long
// field. Guarded to 64-bit architectures only: the set uses the
// uint64_t-taking Invoke3UWithReferrer and the get returns the value in a
// size_t, both of which need 64-bit-wide slots.
static void GetSet64Static(Handle<mirror::ArtField>* f, Thread* self, mirror::ArtMethod* referrer,
                           StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
  // Values covering zero, small numbers and values above 32 bits.
  uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write values[i] into the static field via the quick stub.
    test->Invoke3UWithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                               values[i],
                               StubTest::GetEntrypoint(self, kQuickSet64Static),
                               self,
                               referrer);

    // Read it back via the quick stub; the result comes back in a size_t.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet64Static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1747
1748
// Exercises the Set64Instance/Get64Instance quick entrypoints on an instance
// long field. Guarded to 64-bit architectures only: the 64-bit value is
// passed through a single size_t argument slot.
static void GetSet64Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
  // Values covering zero, small numbers and values above 32 bits.
  uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write values[i] into obj's field via the quick stub.
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet64Instance),
                              self,
                              referrer);

    // Verify the stub's write through the runtime's own accessor.
    int64_t res = f->Get()->GetLong(obj->Get());
    EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;

    // Change the field directly; the stub get below must see the new value.
    res++;
    f->Get()->SetLong<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGet64Instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int64_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1784
// Driver for the field-access stub tests: looks up the AllFields test class,
// allocates an instance, and for every static and instance field whose
// primitive type matches 'test_type' runs the corresponding GetSet* helper.
// Reference-typed array fields are skipped.
static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
  // garbage is created during ClassLinker::Init

  JNIEnv* env = Thread::Current()->GetJniEnv();
  // AllFields is a test class; presumably it is loaded from the test dex/jar
  // configured by the enclosing test fixture — confirm against the test setup.
  jclass jc = env->FindClass("AllFields");
  CHECK(jc != NULL);
  jobject o = env->AllocObject(jc);
  CHECK(o != NULL);

  ScopedObjectAccess soa(self);
  StackHandleScope<5> hs(self);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
  Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
  // Need a method as a referrer
  Handle<mirror::ArtMethod> m(hs.NewHandle(c->GetDirectMethod(0)));

  // Play with it...

  // Static fields.
  {
    Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetSFields()));
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      // A fresh handle scope per iteration keeps the number of live handles
      // bounded regardless of the field count.
      StackHandleScope<1> hs2(self);
      Handle<mirror::ArtField> f(hs2.NewHandle(fields->Get(i)));

      // Dispatch on the field's primitive type; only fields matching
      // 'test_type' are exercised.
      Primitive::Type type = f->GetTypeAsPrimitiveType();
      switch (type) {
        case Primitive::Type::kPrimBoolean:
          if (test_type == type) {
            GetSetBooleanStatic(&f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimByte:
          if (test_type == type) {
            GetSetByteStatic(&f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimChar:
          if (test_type == type) {
            GetSetCharStatic(&f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimShort:
          if (test_type == type) {
            GetSetShortStatic(&f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Static(&f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Static(&f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Don't try array.
          if (test_type == type && f->GetTypeDescriptor()[0] != '[') {
            GetSetObjStatic(&f, self, m.Get(), test);
          }
          break;

        default:
          break;  // Skip.
      }
    }
  }

  // Instance fields.
  {
    Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetIFields()));
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      // A fresh handle scope per iteration keeps the number of live handles
      // bounded regardless of the field count.
      StackHandleScope<1> hs2(self);
      Handle<mirror::ArtField> f(hs2.NewHandle(fields->Get(i)));

      // Dispatch on the field's primitive type; only fields matching
      // 'test_type' are exercised.
      Primitive::Type type = f->GetTypeAsPrimitiveType();
      switch (type) {
        case Primitive::Type::kPrimBoolean:
          if (test_type == type) {
            GetSetBooleanInstance(&obj, &f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimByte:
          if (test_type == type) {
            GetSetByteInstance(&obj, &f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimChar:
          if (test_type == type) {
            GetSetCharInstance(&obj, &f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimShort:
          if (test_type == type) {
            GetSetShortInstance(&obj, &f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Instance(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Instance(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Don't try array.
          if (test_type == type && f->GetTypeDescriptor()[0] != '[') {
            GetSetObjInstance(&obj, &f, self, m.Get(), test);
          }
          break;

        default:
          break;  // Skip.
      }
    }
  }

  // TODO: Deallocate things.
}
1915
Fred Shih37f05ef2014-07-16 18:38:08 -07001916TEST_F(StubTest, Fields8) {
1917 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1918
1919 Thread* self = Thread::Current();
1920
1921 self->TransitionFromSuspendedToRunnable();
1922 LoadDex("AllFields");
1923 bool started = runtime_->Start();
1924 CHECK(started);
1925
1926 TestFields(self, this, Primitive::Type::kPrimBoolean);
1927 TestFields(self, this, Primitive::Type::kPrimByte);
1928}
1929
1930TEST_F(StubTest, Fields16) {
1931 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1932
1933 Thread* self = Thread::Current();
1934
1935 self->TransitionFromSuspendedToRunnable();
1936 LoadDex("AllFields");
1937 bool started = runtime_->Start();
1938 CHECK(started);
1939
1940 TestFields(self, this, Primitive::Type::kPrimChar);
1941 TestFields(self, this, Primitive::Type::kPrimShort);
1942}
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001943
1944TEST_F(StubTest, Fields32) {
1945 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1946
1947 Thread* self = Thread::Current();
1948
1949 self->TransitionFromSuspendedToRunnable();
1950 LoadDex("AllFields");
1951 bool started = runtime_->Start();
1952 CHECK(started);
1953
1954 TestFields(self, this, Primitive::Type::kPrimInt);
1955}
1956
1957TEST_F(StubTest, FieldsObj) {
1958 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1959
1960 Thread* self = Thread::Current();
1961
1962 self->TransitionFromSuspendedToRunnable();
1963 LoadDex("AllFields");
1964 bool started = runtime_->Start();
1965 CHECK(started);
1966
1967 TestFields(self, this, Primitive::Type::kPrimNot);
1968}
1969
1970TEST_F(StubTest, Fields64) {
1971 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1972
1973 Thread* self = Thread::Current();
1974
1975 self->TransitionFromSuspendedToRunnable();
1976 LoadDex("AllFields");
1977 bool started = runtime_->Start();
1978 CHECK(started);
1979
1980 TestFields(self, this, Primitive::Type::kPrimLong);
1981}
1982
// Exercises the interface-method invocation stubs: the IMT conflict trampoline
// (invoked with a hidden argument carrying the interface method's dex index) and
// the regular interface-invoke-with-access-check trampoline. Uses
// ArrayList.contains/add through java.util.List to get a real interface dispatch.
TEST_F(StubTest, IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  // NOTE(review): 7 slots sized to the NewHandle calls below — keep in sync if
  // handles are added or removed.
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod = env->GetMethodID(arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get mirror representation.
  Handle<mirror::ArtMethod> contains_amethod(hs.NewHandle(soa.DecodeMethod(contains_jmethod)));

  // Patch up ArrayList.contains: if it has no compiled code yet, route it through
  // the interpreter bridge so the trampolines below have a valid target.
  if (contains_amethod.Get()->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod.Get()->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
  }

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod = env->GetMethodID(list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get mirror representation of the *interface* method; its dex method index is
  // what the trampolines receive at runtime.
  Handle<mirror::ArtMethod> inf_contains(hs.NewHandle(soa.DecodeMethod(inf_contains_jmethod)));

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Create instances: an empty ArrayList (receiver) and a plain Object (argument).

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object*>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(jobj)));

  // Invocation tests.

  // 1. imt_conflict

  // Contains: list is empty, so List.contains(obj) must report false.

  size_t result =
      Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self, contains_amethod.Get(),
                                   static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add object (via regular JNI, not the stub under test).

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());

  // Contains: same dispatch again, now expected to find the element.

  result = Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                        reinterpret_cast<size_t>(obj.Get()),
                                        StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                        self, contains_amethod.Get(),
                                        static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // 2. regular interface trampoline: first argument is the interface method's
  // dex index rather than a hidden argument.

  // contains(obj) — element was added above, so true.
  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(obj.Get()),
                               StubTest::GetEntrypoint(self,
                                                       kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod.Get());

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // contains(array_list) — the list does not contain itself, so false.
  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               StubTest::GetEntrypoint(self,
                                                       kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod.Get());

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
2103
// Exercises the kQuickIndexOf string stub: String.indexOf(char, fromIndex) over
// a matrix of (string, char, start) combinations, including out-of-range start
// values (-1 and beyond the string length). Expected values are computed with
// the interpreter-side String::FastIndexOf, so only agreement is checked.
TEST_F(StubTest, StringIndexOf) {
#if defined(__arm__) || defined(__aarch64__)
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  const char* c_str[] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
  static constexpr size_t kStringCount = arraysize(c_str);
  const char c_char[] = { 'a', 'b', 'c', 'd', 'e' };
  static constexpr size_t kCharCount = arraysize(c_char);

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
  }

  // Matrix of expectations. First component is first parameter. Note we only check against the
  // sign, not the value. As we are testing random offsets, we need to compute this and need to
  // rely on String::CompareTo being correct.
  static constexpr size_t kMaxLen = 9;
  DCHECK_LE(strlen(c_str[kStringCount-1]), kMaxLen) << "Please fix the indexof test.";

  // Last dimension: start, offset by 1 (so index 0 corresponds to start == -1).
  int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
      }
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        int32_t start = static_cast<int32_t>(z) - 1;

        // Test string_compareto x y
        size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
                                StubTest::GetEntrypoint(self, kQuickIndexOf), self);

        EXPECT_FALSE(self->IsExceptionPending());

        // The result is a 32b signed integer; reinterpret the low bits of the
        // word-sized stub return through a union rather than a narrowing cast.
        union {
          size_t r;
          int32_t i;
        } conv;
        conv.r = result;

        EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
            c_char[y] << " @ " << start;
      }
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
2179
Andreas Gampe525cde22014-04-22 15:44:50 -07002180} // namespace art