/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <cstdio>

#include "common_runtime_test.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "mirror/art_field-inl.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/string-inl.h"
#include "scoped_thread_state_change.h"

namespace art {


class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(type), type);
        }
      }
    }
  }

  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
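  // The Invoke3* helpers below hand-roll a call into a quick stub: they push a ManagedStack
  // fragment so the runtime can still walk the stack, move up to three word-sized arguments
  // (plus Thread* and, in some variants, a referrer method) into the architecture's calling
  // convention via inline assembly, and hand back the stub's word-sized result.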
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             mirror::ArtMethod* referrer) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "subl $12, %%esp\n\t"       // Align stack.
        "pushl %[referrer]\n\t"     // Store referrer.
        "call *%%edi\n\t"           // Call the stub
        "addl $16, %%esp"           // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #20\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "add sp, sp, #20\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop referrer, padding and saved r9
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"       // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t" // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "str %[self], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use them to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldr x18, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"                // Call the stub
        "mov x8, x0\n\t"            // Store result
        "add sp, sp, #16\n\t"       // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"            // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"      // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"       // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t" // Store the FPR comparison result
        "mov %[result], x8\n\t"     // Store the call result

        "b 3f\n\t"                  // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                  // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__) && defined(__clang__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"     // Push referrer
        "pushq (%%rsp)\n\t"         // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"           // Call the stub
        "addq $16, %%rsp\n\t"       // Pop referrer and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "m"(referrer)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbx", "rcx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }
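  // The variant below does the same as Invoke3WithReferrer, but additionally passes a "hidden"
  // argument in the register the stub expects it in (xmm0 on x86, r12 on ARM, x17 on ARM64, and
  // rax in the x86-64 path below); it is used for stubs that take an out-of-band extra
  // parameter, such as the IMT conflict trampoline.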

  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, mirror::ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "movd %[hidden], %%xmm0\n\t"
        "subl $12, %%esp\n\t"       // Align stack.
        "pushl %[referrer]\n\t"     // Store referrer
        "call *%%edi\n\t"           // Call the stub
        "addl $16, %%esp"           // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer), [hidden]"m"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop referrer, padding and saved r9
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"       // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t" // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use them to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x18, x17, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"                // Call the stub
        "mov x8, x0\n\t"            // Store result
        "add sp, sp, #16\n\t"       // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"            // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"      // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"       // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t" // Store the FPR comparison result
        "mov %[result], x8\n\t"     // Store the call result

        "b 3f\n\t"                  // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                  // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__) && defined(__clang__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"     // Push referrer
        "pushq (%%rsp)\n\t"         // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rbx\n\t"           // Call the stub
        "addq $16, %%rsp\n\t"       // Pop referrer and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "b"(code), [referrer] "c"(referrer), [hidden] "a"(hidden)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, code into rbx,
          // referrer into rcx, and hidden into rax
        : "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }
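  // On ARM64 both helpers above also verify that the callee-saved FP registers d8-d15 survive
  // the stub: they are filled with a known bit pattern before the call and compared afterwards,
  // with the outcome stored in fp_result and checked via EXPECT_EQ(0U, fp_result).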

  // Method with 32b arg0, 64b arg1
  size_t Invoke3UWithReferrer(size_t arg0, uint64_t arg1, uintptr_t code, Thread* self,
                              mirror::ArtMethod* referrer) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, 0U, code, self, referrer);
#else
    // Need to split up arguments.
    uint32_t lower = static_cast<uint32_t>(arg1 & 0xFFFFFFFF);
    uint32_t upper = static_cast<uint32_t>((arg1 >> 32) & 0xFFFFFFFF);

    return Invoke3WithReferrer(arg0, lower, upper, code, self, referrer);
#endif
  }

  // Method with 32b arg0, 32b arg1, 64b arg2
  size_t Invoke3UUWithReferrer(uint32_t arg0, uint32_t arg1, uint64_t arg2, uintptr_t code,
                               Thread* self, mirror::ArtMethod* referrer) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, referrer);
#else
    // TODO: Needs 4-param invoke.
    return 0;
#endif
  }

  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
    int32_t offset;
#ifdef __LP64__
    offset = GetThreadOffset<8>(entrypoint).Int32Value();
#else
    offset = GetThreadOffset<4>(entrypoint).Int32Value();
#endif
    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
  }
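  // Typical use in the tests below: resolve a stub through the current thread and call it, e.g.
  //   const uintptr_t memcpy_stub = StubTest::GetEntrypoint(self, kQuickMemcpy);
  //   Invoke3(dst_addr, src_addr, byte_count, memcpy_stub, self);
  // where dst_addr/src_addr/byte_count stand in for the size_t-cast arguments (see the Memcpy
  // test for the real call).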

 protected:
  size_t fp_result;
};


TEST_F(StubTest, Memcpy) {
#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  uint32_t orig[20];
  uint32_t trg[20];
  for (size_t i = 0; i < 20; ++i) {
    orig[i] = i;
    trg[i] = 0;
  }

  Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
          10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);

  EXPECT_EQ(orig[0], trg[0]);

  for (size_t i = 1; i < 4; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  for (size_t i = 4; i < 14; ++i) {
    EXPECT_EQ(orig[i], trg[i]);
  }

  for (size_t i = 14; i < 20; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  // TODO: Test overlapping?

#else
  LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
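// The Memcpy test above copies ten words into the middle of a zeroed 20-word buffer, then
// checks that the copied range matches the source while the guard words on either side stay
// untouched (they remain 0 and so differ from orig[] everywhere except index 0).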

TEST_F(StubTest, LockObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();

  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
  EXPECT_EQ(lock_after.ThinLockCount(), 0U);  // Thin lock starts count at zero

  for (size_t i = 1; i < kThinLockLoops; ++i) {
    Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

    // Check we're at lock count i

    LockWord l_inc = obj->GetLockWord(false);
    LockWord::LockState l_inc_state = l_inc.GetState();
    EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
    EXPECT_EQ(l_inc.ThinLockCount(), i);
  }

  // Force a fat lock by running identity hashcode to fill up lock word.
  Handle<mirror::String> obj2(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  obj2->IdentityHashCode();

  Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after2 = obj2->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
  EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));

  // Test done.
#else
  LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
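// The LockObject test above walks the lock word through its inflation path: a fresh object
// starts out kUnlocked, repeated lock calls keep it kThinLocked with an incrementing
// ThinLockCount, and taking the identity hash code first forces the next lock to inflate to
// kFatLocked with a real Monitor, since hash and thin-lock state cannot share the lock word.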


class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  uint32_t next() {
    val_ = val_ * 48271 % 2147483647 + 13;
    return val_;
  }

  uint32_t val_;
};
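// A deliberately tiny MINSTD-style congruential generator (multiplier 48271 modulo 2^31 - 1,
// plus a small additive offset). Seeding it with a constant below makes the lock/unlock
// stress sequence deterministic, so any failure reproduces exactly.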


// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();

  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
  const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init
  static constexpr size_t kNumberOfLocks = 10;  // Number of objects == number of locks.
  StackHandleScope<kNumberOfLocks + 1> hs(self);
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
  // This should be an illegal monitor state.
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after2 = obj->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);

  LockWord lock_after3 = obj->GetLockWord(false);
  LockWord::LockState new_state3 = lock_after3.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);

  // Stress test:
  // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
  // each step.

  RandGen r(0x1234);

  constexpr size_t kIterations = 10000;  // Number of iterations
  constexpr size_t kMoveToFat = 1000;  // Chance of 1:kMoveToFat to make a lock fat.

  size_t counts[kNumberOfLocks];
  bool fat[kNumberOfLocks];  // Whether a lock should be thin or fat.
  Handle<mirror::String> objects[kNumberOfLocks];

  // Initialize = allocate.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    counts[i] = 0;
    fat[i] = false;
    objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
  }

  for (size_t i = 0; i < kIterations; ++i) {
    // Select which lock to update.
    size_t index = r.next() % kNumberOfLocks;

    // Make lock fat?
    if (!fat[index] && (r.next() % kMoveToFat == 0)) {
      fat[index] = true;
      objects[index]->IdentityHashCode();

      LockWord lock_iter = objects[index]->GetLockWord(false);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (counts[index] == 0) {
        EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
      } else {
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
      }
    } else {
      bool lock;  // Whether to lock or unlock in this step.
      if (counts[index] == 0) {
        lock = true;
      } else if (counts[index] == kThinLockLoops) {
        lock = false;
      } else {
        // Randomly.
        lock = r.next() % 2 == 0;
      }

      if (lock) {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
                      self);
        counts[index]++;
      } else {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      art_quick_unlock_object, self);
        counts[index]--;
      }

      EXPECT_FALSE(self->IsExceptionPending());

      // Check the new state.
      LockWord lock_iter = objects[index]->GetLockWord(true);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (fat[index]) {
        // Abuse MonitorInfo.
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
        MonitorInfo info(objects[index].Get());
        EXPECT_EQ(counts[index], info.entry_count_) << index;
      } else {
        if (counts[index] > 0) {
          EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
          EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
        } else {
          EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
        }
      }
    }
  }

  // Unlock the remaining count times and then check it's unlocked. Then deallocate.
  // Go reverse order to correctly handle Handles.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    size_t index = kNumberOfLocks - 1 - i;
    size_t count = counts[index];
    while (count > 0) {
      test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
                    self);
      count--;
    }

    LockWord lock_after4 = objects[index]->GetLockWord(false);
    LockWord::LockState new_state4 = lock_after4.GetState();
    EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
                || LockWord::LockState::kFatLocked == new_state4);
  }

  // Test done.
#else
  LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, UnlockObject) {
  TestUnlockObject(this);
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_check_cast(void);
#endif

TEST_F(StubTest, CheckCast) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  const uintptr_t art_quick_check_cast = StubTest::GetEntrypoint(self, kQuickCheckCast);

  // Find some classes.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
  Handle<mirror::Class> c2(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  // TODO: Make the following work. But that would require correct managed frames.

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

#else
  LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
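// In the CheckCast test above, the first three invocations are assignable combinations (a
// class against itself, and String[] checked against Object[]) and must not throw; the last
// checks Object[] against String[], which is incompatible, so the stub must leave an
// exception pending.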


TEST_F(StubTest, APutObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  // Do not check non-checked ones, we'd need handlers and stuff...
  const uintptr_t art_quick_aput_obj_with_null_and_bound_check =
      StubTest::GetEntrypoint(self, kQuickAputObjectWithNullAndBoundCheck);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<5> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
  Handle<mirror::Class> ca(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  // Build a string array of size 10
  Handle<mirror::ObjectArray<mirror::Object>> array(
      hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));

  // Build a string -> should be assignable
  Handle<mirror::String> str_obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  // Build a generic object -> should fail assigning
  Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));

  // Play with it...

  // 1) Success cases
  // 1.1) Assign str_obj to array[0..3]

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(3));

  // 1.2) Assign null to array[0..3]

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(3));

  // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.

  // 2) Failure cases (str into str[])
  // 2.1) Array = null
  // TODO: Throwing NPE needs actual DEX code

//  Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
//          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
//
//  EXPECT_TRUE(self->IsExceptionPending());
//  self->ClearException();

  // 2.2) Index < 0

  Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
          reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 2.3) Index >= length

  Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 3) Failure cases (obj into str[])

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // Tests done.
#else
  LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, AllocObject) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            reinterpret_cast<size_t>(c->GetVirtualMethod(0)),  // arbitrary
                            0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObject),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K
    size_t length = 128 * KB / 4;
    while (length > 10) {
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h.Get() == nullptr) {
        self->ClearException();

        // Try a smaller length
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h.Get() != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
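// The out-of-memory case above works in three phases: cap the heap's ideal footprint, fill it
// with progressively smaller object arrays (backing off whenever an allocation fails), then
// allocate single objects until even that throws. Only then is the allocation stub expected
// to return null with an (out-of-memory) exception pending.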
1124
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001125TEST_F(StubTest, AllocObjectArray) {
1126 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1127
Ian Rogersc3ccc102014-06-25 11:52:14 -07001128#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001129 // TODO: Check the "Unresolved" allocation stubs
1130
1131 Thread* self = Thread::Current();
1132 // Create an object
1133 ScopedObjectAccess soa(self);
1134 // garbage is created during ClassLinker::Init
1135
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001136 StackHandleScope<2> hs(self);
1137 Handle<mirror::Class> c(
1138 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001139
1140 // Needed to have a linked method.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001141 Handle<mirror::Class> c_obj(
1142 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001143
1144 // Play with it...
1145
1146 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001147
1148 // For some reason this does not work, as the type_idx is artificial and outside what the
1149 // resolved types of c_obj allow...
1150
1151 if (false) {
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001152 // Use an arbitrary method from c to use as referrer
1153 size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()), // type_idx
1154 reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0)), // arbitrary
1155 10U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001156 StubTest::GetEntrypoint(self, kQuickAllocArray),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001157 self);
1158
1159 EXPECT_FALSE(self->IsExceptionPending());
1160 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1161 mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001162 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001163 VerifyObject(obj);
1164 EXPECT_EQ(obj->GetLength(), 10);
1165 }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases).
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 10U,
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory.
  {
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr),
                            GB,  // that should fail...
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

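// Drives the kQuickStringCompareTo stub across a matrix of string pairs, including strings with
// a non-zero offset into their backing char arrays, and checks the sign of each result against
// String::CompareTo.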
TEST_F(StubTest, StringCompareTo) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);

  ScopedObjectAccess soa(self);
  // Garbage is created during ClassLinker::Init.

  // Create some strings.
  // Use an array so we can index into it and use a matrix for expected results.
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  const char* c[] = { "", "", "a", "aa", "ab",
      "aacaacaacaacaacaac",  // This one's under the default limit to go to __memcmp16.
      "aacaacaacaacaacaacaacaacaacaacaacaac",      // This one's over.
      "aacaacaacaacaacaacaacaacaacaacaacaaca" };   // As is this one. We need a separate one to
                                                   // defeat object-equal optimizations.
  static constexpr size_t kBaseStringCount = arraysize(c);
  static constexpr size_t kStringCount = 2 * kBaseStringCount;

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kBaseStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
  }

  RandGen r(0x1234);

  for (size_t i = kBaseStringCount; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i - kBaseStringCount]));
    int32_t length = s[i]->GetLength();
    if (length > 1) {
      // Set a random offset and length.
      int32_t new_offset = 1 + (r.next() % (length - 1));
      int32_t rest = length - new_offset - 1;
      int32_t new_length = 1 + (rest > 0 ? r.next() % rest : 0);

      s[i]->SetField32<false>(mirror::String::CountOffset(), new_length);
      s[i]->SetField32<false>(mirror::String::OffsetOffset(), new_offset);
    }
  }

  // TODO: Wide characters.

  // Matrix of expectations. The first component is the first parameter. Note we only check
  // against the sign, not the value. As we are testing random offsets, we need to compute this
  // and need to rely on String::CompareTo being correct.
  int32_t expected[kStringCount][kStringCount];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      expected[x][y] = s[x]->CompareTo(s[y].Get());
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      // Test string_compareto x y.
      size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
                              reinterpret_cast<size_t>(s[y].Get()), 0U,
                              art_quick_string_compareto, self);

      EXPECT_FALSE(self->IsExceptionPending());

      // The result is a 32b signed integer.
      union {
        size_t r;
        int32_t i;
      } conv;
      conv.r = result;
      int32_t e = expected[x][y];
      EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" << conv.r;
      EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" << conv.r;
      EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" << conv.r;
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA
      << std::endl;
#endif
}

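// Helpers for the field-stub tests. Each GetSet<Type><Kind> helper below writes a set of test
// values through the matching quick set entrypoint and reads them back through the matching get
// entrypoint (and, for instance fields, through the ArtField accessors), checking width and
// sign-/zero-extension of the result.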
static void GetSetBooleanStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                                Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  constexpr size_t num_values = 5;
  uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet8Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
                                           self,
                                           referrer);
    // Booleans are currently stored as uint8_t; assert on the exact byte so that bad writes or
    // reads are caught, not just truthiness.
    EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

static void GetSetByteStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  int8_t values[] = { -128, -64, 0, 64, 127 };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet8Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetByteStatic),
                                           self,
                                           referrer);
    EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

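// The instance variants check both directions: a stub write is read back through the ArtField
// accessor, and an accessor write is read back through the get stub.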
static void GetSetBooleanInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                                  Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  uint8_t values[] = { 0, true, 2, 128, 0xFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet8Instance),
                              self,
                              referrer);

    uint8_t res = f->Get()->GetBoolean(obj->Get());
    EXPECT_EQ(values[i], res) << "Iteration " << i;

    f->Get()->SetBoolean<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint8_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

static void GetSetByteInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                               Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  int8_t values[] = { -128, -64, 0, 64, 127 };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet8Instance),
                              self,
                              referrer);

    int8_t res = f->Get()->GetByte(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    f->Get()->SetByte<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetByteInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int8_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

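// Char and short share the 16-bit set stubs (kQuickSet16Static/Instance); the get stubs differ
// so that zero-extension (char) and sign-extension (short) can be checked separately.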
static void GetSetCharStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet16Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetCharStatic),
                                           self,
                                           referrer);

    EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

static void GetSetShortStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                              Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet16Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetShortStatic),
                                           self,
                                           referrer);

    EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

static void GetSetCharInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                               Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    uint16_t res = f->Get()->GetChar(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    f->Get()->SetChar<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetCharInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint16_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

static void GetSetShortInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                                Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    int16_t res = f->Get()->GetShort(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    f->Get()->SetShort<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetShortInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int16_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

static void GetSet32Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                           Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet32Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet32Static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

static void GetSet32Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet32Instance),
                              self,
                              referrer);

    int32_t res = f->Get()->GetInt(obj->Get());
    EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;

    res++;
    f->Get()->SetInt<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGet32Instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int32_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

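// Object-field helpers. set_and_check_static/_instance are compiled only on the supported
// architectures since they invoke the object-field stubs unconditionally.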
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))

static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
                                 mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                            reinterpret_cast<size_t>(val),
                            0U,
                            StubTest::GetEntrypoint(self, kQuickSetObjStatic),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                                         0U, 0U,
                                         StubTest::GetEntrypoint(self, kQuickGetObjStatic),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
}
#endif

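// Round-trips nullptr, then a live String reference, then nullptr again through the
// object-field stubs.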
static void GetSetObjStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                            Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_static((*f)->GetDexFieldIndex(), str, self, referrer, test);

  set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);
#else
  LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
static void set_and_check_instance(Handle<mirror::ArtField>* f, mirror::Object* trg,
                                   mirror::Object* val, Thread* self, mirror::ArtMethod* referrer,
                                   StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                            reinterpret_cast<size_t>(trg),
                            reinterpret_cast<size_t>(val),
                            StubTest::GetEntrypoint(self, kQuickSetObjInstance),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                         reinterpret_cast<size_t>(trg),
                                         0U,
                                         StubTest::GetEntrypoint(self, kQuickGetObjInstance),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;

  EXPECT_EQ(val, f->Get()->GetObj(trg));
}
#endif

static void GetSetObjInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                              Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_instance(f, obj->Get(), str, self, referrer, test);

  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
#else
  LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

// TODO: Complete these tests for 32b architectures.

static void GetSet64Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                           Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
  uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3UWithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                               values[i],
                               StubTest::GetEntrypoint(self, kQuickSet64Static),
                               self,
                               referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet64Static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

static void GetSet64Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
  uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet64Instance),
                              self,
                              referrer);

    int64_t res = f->Get()->GetLong(obj->Get());
    EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;

    res++;
    f->Get()->SetLong<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGet64Instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int64_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

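// Loads the AllFields test class and, for every static and instance field whose primitive type
// matches test_type, runs the corresponding GetSet helper above.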
static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
  // Garbage is created during ClassLinker::Init.

  JNIEnv* env = Thread::Current()->GetJniEnv();
  jclass jc = env->FindClass("AllFields");
  CHECK(jc != nullptr);
  jobject o = env->AllocObject(jc);
  CHECK(o != nullptr);

  ScopedObjectAccess soa(self);
  StackHandleScope<5> hs(self);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
  Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
  // Need a method as a referrer.
  Handle<mirror::ArtMethod> m(hs.NewHandle(c->GetDirectMethod(0)));

  // Play with it...

  // Static fields.
  {
    Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetSFields()));
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      StackHandleScope<1> hs(self);
      Handle<mirror::ArtField> f(hs.NewHandle(fields->Get(i)));

      Primitive::Type type = f->GetTypeAsPrimitiveType();
      switch (type) {
        case Primitive::Type::kPrimBoolean:
          if (test_type == type) {
            GetSetBooleanStatic(&obj, &f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimByte:
          if (test_type == type) {
            GetSetByteStatic(&obj, &f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimChar:
          if (test_type == type) {
            GetSetCharStatic(&obj, &f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimShort:
          if (test_type == type) {
            GetSetShortStatic(&obj, &f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Static(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Static(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Don't try array.
          if (test_type == type && f->GetTypeDescriptor()[0] != '[') {
            GetSetObjStatic(&obj, &f, self, m.Get(), test);
          }
          break;

        default:
          break;  // Skip.
      }
    }
  }

  // Instance fields.
  {
    Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetIFields()));
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      StackHandleScope<1> hs(self);
      Handle<mirror::ArtField> f(hs.NewHandle(fields->Get(i)));

      Primitive::Type type = f->GetTypeAsPrimitiveType();
      switch (type) {
        case Primitive::Type::kPrimBoolean:
          if (test_type == type) {
            GetSetBooleanInstance(&obj, &f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimByte:
          if (test_type == type) {
            GetSetByteInstance(&obj, &f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimChar:
          if (test_type == type) {
            GetSetCharInstance(&obj, &f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimShort:
          if (test_type == type) {
            GetSetShortInstance(&obj, &f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Instance(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Instance(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Don't try array.
          if (test_type == type && f->GetTypeDescriptor()[0] != '[') {
            GetSetObjInstance(&obj, &f, self, m.Get(), test);
          }
          break;

        default:
          break;  // Skip.
      }
    }
  }

  // TODO: Deallocate things.
}

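// The Fields* tests below start the runtime and load the AllFields dex file, then drive
// TestFields for each primitive width and for object references.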
TEST_F(StubTest, Fields8) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimBoolean);
  TestFields(self, this, Primitive::Type::kPrimByte);
}

TEST_F(StubTest, Fields16) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimChar);
  TestFields(self, this, Primitive::Type::kPrimShort);
}

TEST_F(StubTest, Fields32) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimInt);
}

TEST_F(StubTest, FieldsObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimNot);
}

TEST_F(StubTest, Fields64) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimLong);
}

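// Exercises the IMT conflict trampoline and the interface invocation trampoline using
// java.util.ArrayList and List.contains.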
TEST_F(StubTest, IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod = env->GetMethodID(arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get mirror representation.
  Handle<mirror::ArtMethod> contains_amethod(hs.NewHandle(soa.DecodeMethod(contains_jmethod)));

  // Patch up ArrayList.contains.
  if (contains_amethod.Get()->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod.Get()->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
  }

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod = env->GetMethodID(list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get mirror representation.
  Handle<mirror::ArtMethod> inf_contains(hs.NewHandle(soa.DecodeMethod(inf_contains_jmethod)));

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Create instances.

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object*>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(jobj)));

  // Invocation tests.

  // 1. imt_conflict

  // Contains.

  size_t result =
      Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self, contains_amethod.Get(),
                                   static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));

  // Contains.

  result = Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                        reinterpret_cast<size_t>(obj.Get()),
                                        StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                        self, contains_amethod.Get(),
                                        static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // 2. Regular interface trampoline.

  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(obj.Get()),
                               StubTest::GetEntrypoint(self,
                                   kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod.Get());

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               StubTest::GetEntrypoint(self,
                                   kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod.Get());

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

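// Drives the kQuickIndexOf stub over a matrix of strings, characters, and start offsets,
// comparing each result against String::FastIndexOf.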
TEST_F(StubTest, StringIndexOf) {
#if defined(__arm__) || defined(__aarch64__)
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  // Garbage is created during ClassLinker::Init.

  // Create some strings.
  // Use an array so we can index into it and use a matrix for expected results.
  // TODO: Shared backing arrays.
  const char* c_str[] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
  static constexpr size_t kStringCount = arraysize(c_str);
  const char c_char[] = { 'a', 'b', 'c', 'd', 'e' };
  static constexpr size_t kCharCount = arraysize(c_char);

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
  }

  // Matrix of expectations, computed via String::FastIndexOf, which we rely on being correct.
  static constexpr size_t kMaxLen = 9;
  DCHECK_LE(strlen(c_str[kStringCount - 1]), kMaxLen) << "Please fix the indexof test.";

  // Last dimension: start, offset by 1.
  int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
      }
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        int32_t start = static_cast<int32_t>(z) - 1;

        // Test indexof for s[x], c_char[y], starting at 'start'.
        size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
                                StubTest::GetEntrypoint(self, kQuickIndexOf), self);

        EXPECT_FALSE(self->IsExceptionPending());

        // The result is a 32b signed integer.
        union {
          size_t r;
          int32_t i;
        } conv;
        conv.r = result;

        EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / "
            << c_char[y] << " @ " << start;
      }
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

}  // namespace art