/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <cstdio>

#include "common_runtime_test.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "mirror/art_field-inl.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/string-inl.h"
#include "scoped_thread_state_change.h"

namespace art {

class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(), type);
        }
      }
    }
  }

  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
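  // The Invoke3* helpers call a quick-code entrypoint as if coming from managed
  // code. Typical use from a test (sketch, mirroring the tests below):
  //   uintptr_t entry = StubTest::GetEntrypoint(self, kQuickMemcpy);
  //   size_t result = Invoke3(arg0, arg1, arg2, entry, self);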
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

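  // The per-architecture blocks below follow the same pattern: spill caller
  // state, place arg0-arg2, code, self (and referrer) into the registers the
  // stub expects, call the stub, then restore state and hand back the result.
  // The arm64 versions additionally write known bit patterns into d8-d15
  // before the call and verify them afterwards (see fpr_result).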
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             mirror::ArtMethod* referrer) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "subl $12, %%esp\n\t"       // Align stack.
        "pushl %[referrer]\n\t"     // Store referrer.
        "call *%%edi\n\t"           // Call the stub
        "addl $16, %%esp"           // Pop referrer and alignment padding.
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\t"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #20\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "add sp, sp, #20\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop referrer, padding and saved r9
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"    // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "str %[self], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldr x18, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"              // Call the stub
        "mov x8, x0\n\t"          // Store result
        "add sp, sp, #16\n\t"     // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

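        // d8-d15 are callee-saved in the AArch64 procedure call standard, so a
        // conforming stub must preserve them; compare against the patterns set above.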
        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"          // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"    // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"     // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t"    // Store the FPR comparison result
        "mov %[result], x8\n\t"        // Store the call result

        "b 3f\n\t"                // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__) && defined(__clang__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"              // Call the stub
        "addq $16, %%rsp\n\t"          // Pop referrer and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "m"(referrer)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbx", "rcx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

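  // As Invoke3WithReferrer, but additionally passes the "hidden" argument that
  // some stubs expect in a dedicated register (here: xmm0 on x86, r12 on arm,
  // x17 on arm64, and rax on x86-64).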
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, mirror::ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "movd %[hidden], %%xmm0\n\t"
        "subl $12, %%esp\n\t"       // Align stack.
        "pushl %[referrer]\n\t"     // Store referrer
        "call *%%edi\n\t"           // Call the stub
        "addl $16, %%esp"           // Pop referrer and alignment padding.
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer), [hidden]"m"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\t"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop referrer, padding and saved r9
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"    // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x18, x17, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"              // Call the stub
        "mov x8, x0\n\t"          // Store result
        "add sp, sp, #16\n\t"     // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

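        // d8-d15 are callee-saved in the AArch64 procedure call standard, so a
        // conforming stub must preserve them; compare against the patterns set above.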
        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"          // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"    // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"     // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t"    // Store the FPR comparison result
        "mov %[result], x8\n\t"        // Store the call result

        "b 3f\n\t"                // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__) && defined(__clang__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rbx\n\t"              // Call the stub
        "addq $16, %%rsp\n\t"          // Pop referrer and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "b"(code), [referrer] "c"(referrer), [hidden] "a"(hidden)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, code into rbx,
          // referrer into rcx, and hidden into rax
        : "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Method with 32b arg0, 64b arg1
  size_t Invoke3UWithReferrer(size_t arg0, uint64_t arg1, uintptr_t code, Thread* self,
                              mirror::ArtMethod* referrer) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, 0U, code, self, referrer);
#else
    // Need to split up arguments.
    uint32_t lower = static_cast<uint32_t>(arg1 & 0xFFFFFFFF);
    uint32_t upper = static_cast<uint32_t>((arg1 >> 32) & 0xFFFFFFFF);

    return Invoke3WithReferrer(arg0, lower, upper, code, self, referrer);
#endif
  }

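  // Reads the address of a quick entrypoint out of the Thread's entrypoint
  // table; the table offset depends on the pointer size of the target ISA.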
  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
    int32_t offset;
#ifdef __LP64__
    offset = GetThreadOffset<8>(entrypoint).Int32Value();
#else
    offset = GetThreadOffset<4>(entrypoint).Int32Value();
#endif
    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
  }

 protected:
  size_t fp_result;
};


TEST_F(StubTest, Memcpy) {
#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  uint32_t orig[20];
  uint32_t trg[20];
  for (size_t i = 0; i < 20; ++i) {
    orig[i] = i;
    trg[i] = 0;
  }

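  // Copy ten words starting at index 4 and check that only that window changed.
  // (orig[0] == trg[0] == 0 by construction, so index 0 still compares equal.)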
  Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
          10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);

  EXPECT_EQ(orig[0], trg[0]);

  for (size_t i = 1; i < 4; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  for (size_t i = 4; i < 14; ++i) {
    EXPECT_EQ(orig[i], trg[i]);
  }

  for (size_t i = 14; i < 20; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  // TODO: Test overlapping?

#else
  LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, LockObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

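  // Re-locking an already thin-held lock increments the recursion count stored
  // in the lock word; kThinLockLoops stays below the thin-lock count limit, so
  // the lock is expected to stay thin throughout the loop below.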
  Thread* self = Thread::Current();

  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
  EXPECT_EQ(lock_after.ThinLockCount(), 0U);  // Thin lock starts count at zero

  for (size_t i = 1; i < kThinLockLoops; ++i) {
    Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

    // Check we're at lock count i

    LockWord l_inc = obj->GetLockWord(false);
    LockWord::LockState l_inc_state = l_inc.GetState();
    EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
    EXPECT_EQ(l_inc.ThinLockCount(), i);
  }

  // Force a fat lock by running identity hashcode to fill up lock word.
  Handle<mirror::String> obj2(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  obj2->IdentityHashCode();

  Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after2 = obj2->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
  EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));

  // Test done.
#else
  LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


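// Deterministic pseudo-random generator for the lock/unlock stress test below,
// so failures are reproducible. The step is a Lehmer-style multiplicative
// congruential update (multiplier 48271, modulus 2^31 - 1) plus a small offset.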
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  uint32_t next() {
    val_ = val_ * 48271 % 2147483647 + 13;
    return val_;
  }

  uint32_t val_;
};


// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();

  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
  const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init
  static constexpr size_t kNumberOfLocks = 10;  // Number of objects = number of locks.
  StackHandleScope<kNumberOfLocks + 1> hs(self);
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
  // This should be an illegal monitor state.
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after2 = obj->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);

  LockWord lock_after3 = obj->GetLockWord(false);
  LockWord::LockState new_state3 = lock_after3.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);

  // Stress test:
  // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
  // each step.

  RandGen r(0x1234);

  constexpr size_t kIterations = 10000;  // Number of iterations
  constexpr size_t kMoveToFat = 1000;    // Chance of 1:kMoveToFat to make a lock fat.

  size_t counts[kNumberOfLocks];
  bool fat[kNumberOfLocks];  // Whether a lock should be thin or fat.
  Handle<mirror::String> objects[kNumberOfLocks];

  // Initialize = allocate.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    counts[i] = 0;
    fat[i] = false;
    objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
  }

  for (size_t i = 0; i < kIterations; ++i) {
    // Select which lock to update.
    size_t index = r.next() % kNumberOfLocks;

    // Make lock fat?
    if (!fat[index] && (r.next() % kMoveToFat == 0)) {
      fat[index] = true;
      objects[index]->IdentityHashCode();

      LockWord lock_iter = objects[index]->GetLockWord(false);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (counts[index] == 0) {
        EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
      } else {
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
      }
    } else {
      bool take_lock;  // Whether to lock or unlock in this step.
      if (counts[index] == 0) {
        take_lock = true;
      } else if (counts[index] == kThinLockLoops) {
        take_lock = false;
      } else {
        // Randomly.
        take_lock = r.next() % 2 == 0;
      }

      if (take_lock) {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
                      self);
        counts[index]++;
      } else {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      art_quick_unlock_object, self);
        counts[index]--;
      }

      EXPECT_FALSE(self->IsExceptionPending());

      // Check the new state.
      LockWord lock_iter = objects[index]->GetLockWord(true);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (fat[index]) {
        // Abuse MonitorInfo.
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
        MonitorInfo info(objects[index].Get());
        EXPECT_EQ(counts[index], info.entry_count_) << index;
      } else {
        if (counts[index] > 0) {
          EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
          EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
        } else {
          EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
        }
      }
    }
  }

  // Unlock the remaining count times and then check it's unlocked. Then deallocate.
  // Go reverse order to correctly handle Handles.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    size_t index = kNumberOfLocks - 1 - i;
    size_t count = counts[index];
    while (count > 0) {
      test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
                    self);
      count--;
    }

    LockWord lock_after4 = objects[index]->GetLockWord(false);
    LockWord::LockState new_state4 = lock_after4.GetState();
    EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
                || LockWord::LockState::kFatLocked == new_state4);
  }

  // Test done.
#else
  LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, UnlockObject) {
  TestUnlockObject(this);
}

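// Note: the CheckCast test below resolves the entrypoint through
// StubTest::GetEntrypoint and shadows this name with a local variable; the
// declaration appears unused within this file.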
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_check_cast(void);
#endif

TEST_F(StubTest, CheckCast) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  const uintptr_t art_quick_check_cast = StubTest::GetEntrypoint(self, kQuickCheckCast);

  // Find some classes.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
  Handle<mirror::Class> c2(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  // TODO: Make the following work. But that would require correct managed frames.

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

#else
  LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


TEST_F(StubTest, APutObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  // Do not check non-checked ones, we'd need handlers and stuff...
  const uintptr_t art_quick_aput_obj_with_null_and_bound_check =
      StubTest::GetEntrypoint(self, kQuickAputObjectWithNullAndBoundCheck);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<5> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
  Handle<mirror::Class> ca(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  // Build a string array of size 10
  Handle<mirror::ObjectArray<mirror::Object>> array(
      hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));

  // Build a string -> should be assignable
  Handle<mirror::String> str_obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  // Build a generic object -> should fail assigning
  Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));

  // Play with it...

  // 1) Success cases
  // 1.1) Assign str_obj to array[0..3]

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(3));

  // 1.2) Assign null to array[0..3]

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(3));

  // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.

  // 2) Failure cases (str into str[])
  // 2.1) Array = null
  // TODO: Throwing NPE needs actual DEX code

//  Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
//          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
//
//  EXPECT_TRUE(self->IsExceptionPending());
//  self->ClearException();

  // 2.2) Index < 0

  Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
          reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 2.3) Index >= array length

  Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 3) Failure cases (obj into str[])

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // Tests done.
#else
  LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, AllocObject) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
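  // Exercise the three allocation paths in turn: the generic stub (type index
  // plus referrer method), the resolved-class stub, and the initialized-class
  // stub.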
  {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            reinterpret_cast<size_t>(c->GetVirtualMethod(0)),  // arbitrary
                            0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObject),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K
    size_t length = 128 * KB / 4;
    while (length > 10) {
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h.Get() == nullptr) {
        self->ClearException();

        // Try a smaller length
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h.Get() != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, AllocObjectArray) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(self);
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

  // Needed to have a linked method.
  Handle<mirror::Class> c_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());

  // For some reason this does not work, as the type_idx is artificial and outside what the
  // resolved types of c_obj allow...

  if ((false)) {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            10U,
                            reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0)),  // arbitrary
                            StubTest::GetEntrypoint(self, kQuickAllocArray),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    EXPECT_EQ(obj->GetLength(), 10);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 10U,
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory.
  {
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()),
                            GB,  // that should fail...
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001197TEST_F(StubTest, StringCompareTo) {
1198 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1199
Ian Rogersc3ccc102014-06-25 11:52:14 -07001200#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001201 // TODO: Check the "Unresolved" allocation stubs
1202
1203 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -07001204
1205 const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);
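  // Note: fetched once outside the loops; this should be the same stub that compiled code
  // invokes for String.compareTo.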

  ScopedObjectAccess soa(self);
  // Garbage is created during ClassLinker::Init.

  // Create some strings.
  // Use an array so we can index into it and use a matrix for expected results.
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  const char* c[] = { "", "", "a", "aa", "ab",
      "aacaacaacaacaacaac",  // This one's under the default limit to go to __memcmp16.
      "aacaacaacaacaacaacaacaacaacaacaacaac",  // This one's over.
      "aacaacaacaacaacaacaacaacaacaacaacaaca" };  // As is this one. We need a separate one to
                                                  // defeat object-equal optimizations.
  static constexpr size_t kBaseStringCount = arraysize(c);
  static constexpr size_t kStringCount = 2 * kBaseStringCount;

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kBaseStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
  }

  RandGen r(0x1234);

  for (size_t i = kBaseStringCount; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i - kBaseStringCount]));
    int32_t length = s[i]->GetLength();
    if (length > 1) {
      // Set a random offset and length.
      int32_t new_offset = 1 + (r.next() % (length - 1));
      int32_t rest = length - new_offset - 1;
      int32_t new_length = 1 + (rest > 0 ? r.next() % rest : 0);

      s[i]->SetField32<false>(mirror::String::CountOffset(), new_length);
      s[i]->SetField32<false>(mirror::String::OffsetOffset(), new_offset);
    }
  }
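  // Note: writing count/offset directly gives the second half of the strings a non-zero start
  // offset within their backing char arrays, exercising the stub's offset arithmetic.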

  // TODO: Wide characters.

  // Matrix of expectations. The first component is the first parameter. Note we only check
  // against the sign, not the value. Because the offsets are random, we compute the expected
  // values at runtime and rely on String::CompareTo being correct.
  int32_t expected[kStringCount][kStringCount];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      expected[x][y] = s[x]->CompareTo(s[y].Get());
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      // Test string_compareto x y.
      size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
                              reinterpret_cast<size_t>(s[y].Get()), 0U,
                              art_quick_string_compareto, self);

      EXPECT_FALSE(self->IsExceptionPending());

      // The result is a 32-bit signed integer.
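      // The stub returns it widened to a native word, so take the low 32 bits back out through
      // a union rather than an implicit truncation.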
      union {
        size_t r;
        int32_t i;
      } conv;
      conv.r = result;
      int32_t e = expected[x][y];
      EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res="
          << conv.r;
      EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res="
          << conv.r;
      EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res="
          << conv.r;
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA
      << std::endl;
#endif
}


static void GetSetBooleanStatic(Handle<mirror::ArtField>* f, Thread* self,
                                mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  constexpr size_t num_values = 5;
  uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };
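  // Values other than 0/1 are deliberate: they check that the 8-bit set stub stores the raw
  // byte rather than canonicalizing it to true/false.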

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet8Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
                                           self,
                                           referrer);
    // Booleans are currently stored as uint8_t; be zealous in asserting that the exact byte
    // was written and read back.
    EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
static void GetSetByteStatic(Handle<mirror::ArtField>* f, Thread* self,
                             mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  int8_t values[] = { -128, -64, 0, 64, 127 };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet8Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetByteStatic),
                                           self,
                                           referrer);
    EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


static void GetSetBooleanInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                                  Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  uint8_t values[] = { 0, 1, 2, 128, 0xFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet8Instance),
                              self,
                              referrer);

    uint8_t res = f->Get()->GetBoolean(obj->Get());
    EXPECT_EQ(values[i], res) << "Iteration " << i;

    f->Get()->SetBoolean<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint8_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
static void GetSetByteInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                               Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  int8_t values[] = { -128, -64, 0, 64, 127 };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet8Instance),
                              self,
                              referrer);

    int8_t res = f->Get()->GetByte(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    f->Get()->SetByte<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetByteInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int8_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

static void GetSetCharStatic(Handle<mirror::ArtField>* f, Thread* self,
                             mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet16Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetCharStatic),
                                           self,
                                           referrer);

    EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
static void GetSetShortStatic(Handle<mirror::ArtField>* f, Thread* self,
                              mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet16Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetShortStatic),
                                           self,
                                           referrer);

    EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

static void GetSetCharInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                               Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    uint16_t res = f->Get()->GetChar(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    f->Get()->SetChar<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetCharInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint16_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
static void GetSetShortInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                                Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    int16_t res = f->Get()->GetShort(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    f->Get()->SetShort<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetShortInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int16_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

static void GetSet32Static(Handle<mirror::ArtField>* f, Thread* self,
                           mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet32Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet32Static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


static void GetSet32Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet32Instance),
                              self,
                              referrer);

    int32_t res = f->Get()->GetInt(obj->Get());
    EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;

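    // Bump the value and write it back through the Field API, so the stub read below checks
    // the get path independently of the set stub.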
    res++;
    f->Get()->SetInt<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGet32Instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int32_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))

static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
                                 mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                            reinterpret_cast<size_t>(val),
                            0U,
                            StubTest::GetEntrypoint(self, kQuickSetObjStatic),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                                         0U, 0U,
                                         StubTest::GetEntrypoint(self, kQuickGetObjStatic),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
}
#endif

static void GetSetObjStatic(Handle<mirror::ArtField>* f, Thread* self,
                            mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_static((*f)->GetDexFieldIndex(), str, self, referrer, test);

  set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);
#else
  LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
static void set_and_check_instance(Handle<mirror::ArtField>* f, mirror::Object* trg,
                                   mirror::Object* val, Thread* self, mirror::ArtMethod* referrer,
                                   StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                            reinterpret_cast<size_t>(trg),
                            reinterpret_cast<size_t>(val),
                            StubTest::GetEntrypoint(self, kQuickSetObjInstance),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                         reinterpret_cast<size_t>(trg),
                                         0U,
                                         StubTest::GetEntrypoint(self, kQuickGetObjInstance),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;

  EXPECT_EQ(val, f->Get()->GetObj(trg));
}
#endif

static void GetSetObjInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                              Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_instance(f, obj->Get(), str, self, referrer, test);

  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
#else
  LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


// TODO: Complete these tests for 32b architectures.
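// Note: the 64-bit helpers below pass the value as a single register-sized argument, which
// only works where size_t is 64 bits; hence the narrower #if guards.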

static void GetSet64Static(Handle<mirror::ArtField>* f, Thread* self,
                           mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
  uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3UWithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                               values[i],
                               StubTest::GetEntrypoint(self, kQuickSet64Static),
                               self,
                               referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet64Static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


static void GetSet64Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
  uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet64Instance),
                              self,
                              referrer);

    int64_t res = f->Get()->GetLong(obj->Get());
    EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;

    res++;
    f->Get()->SetLong<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGet64Instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int64_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
  // Garbage is created during ClassLinker::Init.

  JNIEnv* env = Thread::Current()->GetJniEnv();
  jclass jc = env->FindClass("AllFields");
  CHECK(jc != nullptr);
  jobject o = env->AllocObject(jc);
  CHECK(o != nullptr);
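  // AllFields is a test dex class expected to declare static and instance fields of each
  // primitive type; an instance is allocated so the instance-field stubs have a receiver.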

  ScopedObjectAccess soa(self);
  StackHandleScope<5> hs(self);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
  Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
  // Need a method as a referrer.
  Handle<mirror::ArtMethod> m(hs.NewHandle(c->GetDirectMethod(0)));

  // Play with it...

  // Static fields.
  {
    Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetSFields()));
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      StackHandleScope<1> hs2(self);
      Handle<mirror::ArtField> f(hs2.NewHandle(fields->Get(i)));

      Primitive::Type type = f->GetTypeAsPrimitiveType();
      switch (type) {
        case Primitive::Type::kPrimBoolean:
          if (test_type == type) {
            GetSetBooleanStatic(&f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimByte:
          if (test_type == type) {
            GetSetByteStatic(&f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimChar:
          if (test_type == type) {
            GetSetCharStatic(&f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimShort:
          if (test_type == type) {
            GetSetShortStatic(&f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Static(&f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Static(&f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Don't try array.
          if (test_type == type && f->GetTypeDescriptor()[0] != '[') {
            GetSetObjStatic(&f, self, m.Get(), test);
          }
          break;

        default:
          break;  // Skip.
      }
    }
  }

  // Instance fields.
  {
    Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetIFields()));
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      StackHandleScope<1> hs2(self);
      Handle<mirror::ArtField> f(hs2.NewHandle(fields->Get(i)));

      Primitive::Type type = f->GetTypeAsPrimitiveType();
      switch (type) {
        case Primitive::Type::kPrimBoolean:
          if (test_type == type) {
            GetSetBooleanInstance(&obj, &f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimByte:
          if (test_type == type) {
            GetSetByteInstance(&obj, &f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimChar:
          if (test_type == type) {
            GetSetCharInstance(&obj, &f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimShort:
          if (test_type == type) {
            GetSetShortInstance(&obj, &f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Instance(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Instance(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Don't try array.
          if (test_type == type && f->GetTypeDescriptor()[0] != '[') {
            GetSetObjInstance(&obj, &f, self, m.Get(), test);
          }
          break;

        default:
          break;  // Skip.
      }
    }
  }

  // TODO: Deallocate things.
}

TEST_F(StubTest, Fields8) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

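  // Boolean and byte fields share the same 8-bit set stub (kQuickSet8Static/Instance), so
  // both types are exercised together here.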
  TestFields(self, this, Primitive::Type::kPrimBoolean);
  TestFields(self, this, Primitive::Type::kPrimByte);
}

TEST_F(StubTest, Fields16) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimChar);
  TestFields(self, this, Primitive::Type::kPrimShort);
}

TEST_F(StubTest, Fields32) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimInt);
}

TEST_F(StubTest, FieldsObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimNot);
}

TEST_F(StubTest, Fields64) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimLong);
}

TEST_F(StubTest, IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod =
      env->GetMethodID(arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get mirror representation.
  Handle<mirror::ArtMethod> contains_amethod(hs.NewHandle(soa.DecodeMethod(contains_jmethod)));

  // Patch up ArrayList.contains.
  if (contains_amethod.Get()->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod.Get()->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
  }

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod =
      env->GetMethodID(list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get mirror representation.
  Handle<mirror::ArtMethod> inf_contains(hs.NewHandle(soa.DecodeMethod(inf_contains_jmethod)));

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Create instances.

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object*>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(jobj)));

  // Invocation tests.

  // 1. imt_conflict

  // Contains.

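  // Call the IMT conflict trampoline directly: arg0 is unused, arg1 is the receiver, arg2 the
  // call's parameter, and the interface method's dex method index rides in the hidden argument.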
  size_t result =
      Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self, contains_amethod.Get(),
                                   static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));

  // Contains.

  result = Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                        reinterpret_cast<size_t>(obj.Get()),
                                        StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                        self, contains_amethod.Get(),
                                        static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // 2. regular interface trampoline
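  // For the regular trampoline the interface method's dex method index travels as the normal
  // first argument rather than the hidden one.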

  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(obj.Get()),
                               StubTest::GetEntrypoint(self,
                                   kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod.Get());

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               StubTest::GetEntrypoint(self,
                                   kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod.Get());

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, StringIndexOf) {
#if defined(__arm__) || defined(__aarch64__)
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  // Garbage is created during ClassLinker::Init.

  // Create some strings.
  // Use an array so we can index into it and use a matrix for expected results.
  const char* c_str[] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
  static constexpr size_t kStringCount = arraysize(c_str);
  const char c_char[] = { 'a', 'b', 'c', 'd', 'e' };
  static constexpr size_t kCharCount = arraysize(c_char);

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
  }

  // Matrix of expectations. The first component is the first parameter. We compute the
  // expected values with String::FastIndexOf and rely on it being correct.
  static constexpr size_t kMaxLen = 9;
  DCHECK_LE(strlen(c_str[kStringCount - 1]), kMaxLen) << "Please fix the indexof test.";

  // Last dimension: start, offset by 1.
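  // With z in [0, kMaxLen + 2], start runs from -1 (before the string) to one past kMaxLen,
  // covering out-of-range starts on both ends.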
  int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
      }
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        int32_t start = static_cast<int32_t>(z) - 1;

        // Test indexof x y, starting at start.
        size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
                                StubTest::GetEntrypoint(self, kQuickIndexOf), self);

        EXPECT_FALSE(self->IsExceptionPending());

        // The result is a 32-bit signed integer.
        union {
          size_t r;
          int32_t i;
        } conv;
        conv.r = result;

        EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / "
            << c_char[y] << " @ " << start;
      }
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

}  // namespace art