/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <cstdio>

#include "common_runtime_test.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "mirror/art_field-inl.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/string-inl.h"
#include "scoped_thread_state_change.h"

namespace art {


class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(type), type);
        }
      }
    }
  }

  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             mirror::ArtMethod* referrer) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "subl $12, %%esp\n\t"       // Align stack.
        "pushl %[referrer]\n\t"     // Store referrer.
        "call *%%edi\n\t"           // Call the stub
        "addl $16, %%esp"           // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #20\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "add sp, sp, #20\n\t"

        "blx r3\n\t"                // Call the stub
119 "add sp, sp, #12\n\t" // Pop nullptr and padding
120 ".cfi_adjust_cfa_offset -12\n\t"
121 "pop {r1-r12, lr}\n\t" // Restore state
122 ".cfi_adjust_cfa_offset -52\n\t"
123 "mov %[result], r0\n\t" // Save the result
124 : [result] "=r" (result)
125 // Use the result from r0
126 : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
127 [referrer] "r"(referrer)
Andreas Gampe2f6e3512014-06-07 01:32:33 -0700128 : "memory"); // clobber.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700129#elif defined(__aarch64__)
130 __asm__ __volatile__(
Andreas Gampef39b3782014-06-03 14:38:30 -0700131 // Spill x0-x7 which we say we don't clobber. May contain args.
Andreas Gampe6cf80102014-05-19 11:32:41 -0700132 "sub sp, sp, #64\n\t"
133 ".cfi_adjust_cfa_offset 64\n\t"
Andreas Gampef39b3782014-06-03 14:38:30 -0700134 "stp x0, x1, [sp]\n\t"
135 "stp x2, x3, [sp, #16]\n\t"
136 "stp x4, x5, [sp, #32]\n\t"
137 "stp x6, x7, [sp, #48]\n\t"
Andreas Gampe6cf80102014-05-19 11:32:41 -0700138
Andreas Gampef39b3782014-06-03 14:38:30 -0700139 "sub sp, sp, #16\n\t" // Reserve stack space, 16B aligned
140 ".cfi_adjust_cfa_offset 16\n\t"
141 "str %[referrer], [sp]\n\t" // referrer
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700142
143 // Push everything on the stack, so we don't rely on the order. What a mess. :-(
144 "sub sp, sp, #48\n\t"
Andreas Gampe6cf80102014-05-19 11:32:41 -0700145 ".cfi_adjust_cfa_offset 48\n\t"
Andreas Gampef39b3782014-06-03 14:38:30 -0700146 // All things are "r" constraints, so direct str/stp should work.
147 "stp %[arg0], %[arg1], [sp]\n\t"
148 "stp %[arg2], %[code], [sp, #16]\n\t"
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700149 "str %[self], [sp, #32]\n\t"
Andreas Gampe6cf80102014-05-19 11:32:41 -0700150
151 // Now we definitely have x0-x3 free, use it to garble d8 - d15
152 "movk x0, #0xfad0\n\t"
153 "movk x0, #0xebad, lsl #16\n\t"
154 "movk x0, #0xfad0, lsl #32\n\t"
155 "movk x0, #0xebad, lsl #48\n\t"
156 "fmov d8, x0\n\t"
157 "add x0, x0, 1\n\t"
158 "fmov d9, x0\n\t"
159 "add x0, x0, 1\n\t"
160 "fmov d10, x0\n\t"
161 "add x0, x0, 1\n\t"
162 "fmov d11, x0\n\t"
163 "add x0, x0, 1\n\t"
164 "fmov d12, x0\n\t"
165 "add x0, x0, 1\n\t"
166 "fmov d13, x0\n\t"
167 "add x0, x0, 1\n\t"
168 "fmov d14, x0\n\t"
169 "add x0, x0, 1\n\t"
170 "fmov d15, x0\n\t"
171
Andreas Gampef39b3782014-06-03 14:38:30 -0700172 // Load call params into the right registers.
173 "ldp x0, x1, [sp]\n\t"
174 "ldp x2, x3, [sp, #16]\n\t"
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700175 "ldr x18, [sp, #32]\n\t"
176 "add sp, sp, #48\n\t"
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700177 ".cfi_adjust_cfa_offset -48\n\t"
178
Andreas Gampe6cf80102014-05-19 11:32:41 -0700179
180 "blr x3\n\t" // Call the stub
Andreas Gampef39b3782014-06-03 14:38:30 -0700181 "mov x8, x0\n\t" // Store result
182 "add sp, sp, #16\n\t" // Drop the quick "frame"
183 ".cfi_adjust_cfa_offset -16\n\t"
Andreas Gampe6cf80102014-05-19 11:32:41 -0700184
185 // Test d8 - d15. We can use x1 and x2.
186 "movk x1, #0xfad0\n\t"
187 "movk x1, #0xebad, lsl #16\n\t"
188 "movk x1, #0xfad0, lsl #32\n\t"
189 "movk x1, #0xebad, lsl #48\n\t"
190 "fmov x2, d8\n\t"
191 "cmp x1, x2\n\t"
192 "b.ne 1f\n\t"
193 "add x1, x1, 1\n\t"
194
195 "fmov x2, d9\n\t"
196 "cmp x1, x2\n\t"
197 "b.ne 1f\n\t"
198 "add x1, x1, 1\n\t"
199
200 "fmov x2, d10\n\t"
201 "cmp x1, x2\n\t"
202 "b.ne 1f\n\t"
203 "add x1, x1, 1\n\t"
204
205 "fmov x2, d11\n\t"
206 "cmp x1, x2\n\t"
207 "b.ne 1f\n\t"
208 "add x1, x1, 1\n\t"
209
210 "fmov x2, d12\n\t"
211 "cmp x1, x2\n\t"
212 "b.ne 1f\n\t"
213 "add x1, x1, 1\n\t"
214
215 "fmov x2, d13\n\t"
216 "cmp x1, x2\n\t"
217 "b.ne 1f\n\t"
218 "add x1, x1, 1\n\t"
219
220 "fmov x2, d14\n\t"
221 "cmp x1, x2\n\t"
222 "b.ne 1f\n\t"
223 "add x1, x1, 1\n\t"
224
225 "fmov x2, d15\n\t"
226 "cmp x1, x2\n\t"
227 "b.ne 1f\n\t"
228
Andreas Gampef39b3782014-06-03 14:38:30 -0700229 "mov x9, #0\n\t" // Use x9 as flag, in clobber list
Andreas Gampe6cf80102014-05-19 11:32:41 -0700230
231 // Finish up.
232 "2:\n\t"
Andreas Gampef39b3782014-06-03 14:38:30 -0700233 "ldp x0, x1, [sp]\n\t" // Restore stuff not named clobbered, may contain fpr_result
234 "ldp x2, x3, [sp, #16]\n\t"
235 "ldp x4, x5, [sp, #32]\n\t"
236 "ldp x6, x7, [sp, #48]\n\t"
237 "add sp, sp, #64\n\t" // Free stack space, now sp as on entry
Andreas Gampe6cf80102014-05-19 11:32:41 -0700238 ".cfi_adjust_cfa_offset -64\n\t"
239
Andreas Gampef39b3782014-06-03 14:38:30 -0700240 "str x9, %[fpr_result]\n\t" // Store the FPR comparison result
241 "mov %[result], x8\n\t" // Store the call result
242
Andreas Gampe6cf80102014-05-19 11:32:41 -0700243 "b 3f\n\t" // Goto end
244
245 // Failed fpr verification.
246 "1:\n\t"
Andreas Gampef39b3782014-06-03 14:38:30 -0700247 "mov x9, #1\n\t"
Andreas Gampe6cf80102014-05-19 11:32:41 -0700248 "b 2b\n\t" // Goto finish-up
249
250 // End
251 "3:\n\t"
Andreas Gampecf4035a2014-05-28 22:43:01 -0700252 : [result] "=r" (result)
          // Use the result from x0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"              // Call the stub
271 "addq $16, %%rsp\n\t" // Pop nullptr and padding
272 ".cfi_adjust_cfa_offset -16\n\t"
273 : "=a" (result)
274 // Use the result from rax
Andreas Gampe2f6e3512014-06-07 01:32:33 -0700275 : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "m"(referrer)
276 // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
277 : "rbx", "rcx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
278 "memory"); // clobber all
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700279 // TODO: Should we clobber the other registers?
280#else
281 LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
282 result = 0;
283#endif
284 // Pop transition.
285 self->PopManagedStackFragment(fragment);
Andreas Gampe6cf80102014-05-19 11:32:41 -0700286
287 fp_result = fpr_result;
288 EXPECT_EQ(0U, fp_result);
289
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700290 return result;
291 }
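
  // Summary of the per-architecture blocks above: arguments are placed in the registers the
  // quick stubs expect (eax/ecx/edx with the stub address in edi on x86; r0-r2 with the stub
  // in r3 and Thread* in r9 on ARM; x0-x2 with the stub in x3 and Thread* in x18 on ARM64;
  // the native convention with the stub in rax on x86-64), the referrer method is pushed on
  // the stack where the stub will look for the caller, and the stub's return value comes back
  // through 'result'. The ARM64 block also seeds d8-d15 with a known pattern and checks after
  // the call that the stub preserved these callee-save FP registers ('fpr_result').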

  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, mirror::ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "movd %[hidden], %%xmm0\n\t"
        "subl $12, %%esp\n\t"       // Align stack.
        "pushl %[referrer]\n\t"     // Store referrer
        "call *%%edi\n\t"           // Call the stub
        "addl $16, %%esp"           // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer), [hidden]"m"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                // Call the stub
345 "add sp, sp, #12\n\t" // Pop nullptr and padding
346 ".cfi_adjust_cfa_offset -12\n\t"
347 "pop {r1-r12, lr}\n\t" // Restore state
348 ".cfi_adjust_cfa_offset -52\n\t"
349 "mov %[result], r0\n\t" // Save the result
350 : [result] "=r" (result)
351 // Use the result from r0
Andreas Gampe2f6e3512014-06-07 01:32:33 -0700352 : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
353 [referrer] "r"(referrer), [hidden] "r"(hidden)
354 : "memory"); // clobber.
Andreas Gampe51f76352014-05-21 08:28:48 -0700355#elif defined(__aarch64__)
356 __asm__ __volatile__(
Andreas Gampef39b3782014-06-03 14:38:30 -0700357 // Spill x0-x7 which we say we don't clobber. May contain args.
Andreas Gampe51f76352014-05-21 08:28:48 -0700358 "sub sp, sp, #64\n\t"
359 ".cfi_adjust_cfa_offset 64\n\t"
Andreas Gampef39b3782014-06-03 14:38:30 -0700360 "stp x0, x1, [sp]\n\t"
361 "stp x2, x3, [sp, #16]\n\t"
362 "stp x4, x5, [sp, #32]\n\t"
363 "stp x6, x7, [sp, #48]\n\t"
Andreas Gampe51f76352014-05-21 08:28:48 -0700364
Andreas Gampef39b3782014-06-03 14:38:30 -0700365 "sub sp, sp, #16\n\t" // Reserve stack space, 16B aligned
366 ".cfi_adjust_cfa_offset 16\n\t"
367 "str %[referrer], [sp]\n\t" // referrer
Andreas Gampe51f76352014-05-21 08:28:48 -0700368
369 // Push everything on the stack, so we don't rely on the order. What a mess. :-(
370 "sub sp, sp, #48\n\t"
371 ".cfi_adjust_cfa_offset 48\n\t"
Andreas Gampef39b3782014-06-03 14:38:30 -0700372 // All things are "r" constraints, so direct str/stp should work.
373 "stp %[arg0], %[arg1], [sp]\n\t"
374 "stp %[arg2], %[code], [sp, #16]\n\t"
375 "stp %[self], %[hidden], [sp, #32]\n\t"
Andreas Gampe51f76352014-05-21 08:28:48 -0700376
377 // Now we definitely have x0-x3 free, use it to garble d8 - d15
378 "movk x0, #0xfad0\n\t"
379 "movk x0, #0xebad, lsl #16\n\t"
380 "movk x0, #0xfad0, lsl #32\n\t"
381 "movk x0, #0xebad, lsl #48\n\t"
382 "fmov d8, x0\n\t"
383 "add x0, x0, 1\n\t"
384 "fmov d9, x0\n\t"
385 "add x0, x0, 1\n\t"
386 "fmov d10, x0\n\t"
387 "add x0, x0, 1\n\t"
388 "fmov d11, x0\n\t"
389 "add x0, x0, 1\n\t"
390 "fmov d12, x0\n\t"
391 "add x0, x0, 1\n\t"
392 "fmov d13, x0\n\t"
393 "add x0, x0, 1\n\t"
394 "fmov d14, x0\n\t"
395 "add x0, x0, 1\n\t"
396 "fmov d15, x0\n\t"
397
Andreas Gampef39b3782014-06-03 14:38:30 -0700398 // Load call params into the right registers.
399 "ldp x0, x1, [sp]\n\t"
400 "ldp x2, x3, [sp, #16]\n\t"
Andreas Gampe1a7e2922014-05-21 15:37:53 -0700401 "ldp x18, x17, [sp, #32]\n\t"
Andreas Gampe51f76352014-05-21 08:28:48 -0700402 "add sp, sp, #48\n\t"
403 ".cfi_adjust_cfa_offset -48\n\t"
404
Andreas Gampe51f76352014-05-21 08:28:48 -0700405 "blr x3\n\t" // Call the stub
Andreas Gampef39b3782014-06-03 14:38:30 -0700406 "mov x8, x0\n\t" // Store result
407 "add sp, sp, #16\n\t" // Drop the quick "frame"
408 ".cfi_adjust_cfa_offset -16\n\t"
Andreas Gampe51f76352014-05-21 08:28:48 -0700409
410 // Test d8 - d15. We can use x1 and x2.
411 "movk x1, #0xfad0\n\t"
412 "movk x1, #0xebad, lsl #16\n\t"
413 "movk x1, #0xfad0, lsl #32\n\t"
414 "movk x1, #0xebad, lsl #48\n\t"
415 "fmov x2, d8\n\t"
416 "cmp x1, x2\n\t"
417 "b.ne 1f\n\t"
418 "add x1, x1, 1\n\t"
419
420 "fmov x2, d9\n\t"
421 "cmp x1, x2\n\t"
422 "b.ne 1f\n\t"
423 "add x1, x1, 1\n\t"
424
425 "fmov x2, d10\n\t"
426 "cmp x1, x2\n\t"
427 "b.ne 1f\n\t"
428 "add x1, x1, 1\n\t"
429
430 "fmov x2, d11\n\t"
431 "cmp x1, x2\n\t"
432 "b.ne 1f\n\t"
433 "add x1, x1, 1\n\t"
434
435 "fmov x2, d12\n\t"
436 "cmp x1, x2\n\t"
437 "b.ne 1f\n\t"
438 "add x1, x1, 1\n\t"
439
440 "fmov x2, d13\n\t"
441 "cmp x1, x2\n\t"
442 "b.ne 1f\n\t"
443 "add x1, x1, 1\n\t"
444
445 "fmov x2, d14\n\t"
446 "cmp x1, x2\n\t"
447 "b.ne 1f\n\t"
448 "add x1, x1, 1\n\t"
449
450 "fmov x2, d15\n\t"
451 "cmp x1, x2\n\t"
452 "b.ne 1f\n\t"
453
Andreas Gampef39b3782014-06-03 14:38:30 -0700454 "mov x9, #0\n\t" // Use x9 as flag, in clobber list
Andreas Gampe51f76352014-05-21 08:28:48 -0700455
456 // Finish up.
457 "2:\n\t"
Andreas Gampef39b3782014-06-03 14:38:30 -0700458 "ldp x0, x1, [sp]\n\t" // Restore stuff not named clobbered, may contain fpr_result
459 "ldp x2, x3, [sp, #16]\n\t"
460 "ldp x4, x5, [sp, #32]\n\t"
461 "ldp x6, x7, [sp, #48]\n\t"
462 "add sp, sp, #64\n\t" // Free stack space, now sp as on entry
Andreas Gampe51f76352014-05-21 08:28:48 -0700463 ".cfi_adjust_cfa_offset -64\n\t"
464
Andreas Gampef39b3782014-06-03 14:38:30 -0700465 "str x9, %[fpr_result]\n\t" // Store the FPR comparison result
466 "mov %[result], x8\n\t" // Store the call result
467
Andreas Gampe51f76352014-05-21 08:28:48 -0700468 "b 3f\n\t" // Goto end
469
470 // Failed fpr verification.
471 "1:\n\t"
Andreas Gampef39b3782014-06-03 14:38:30 -0700472 "mov x9, #1\n\t"
Andreas Gampe51f76352014-05-21 08:28:48 -0700473 "b 2b\n\t" // Goto finish-up
474
475 // End
476 "3:\n\t"
Andreas Gampef39b3782014-06-03 14:38:30 -0700477 : [result] "=r" (result)
          // Use the result from x0
Andreas Gampe51f76352014-05-21 08:28:48 -0700479 : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
Andreas Gampef39b3782014-06-03 14:38:30 -0700480 [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
481 : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
482 "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
483 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
484 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
485 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
Andreas Gampe2f6e3512014-06-07 01:32:33 -0700486 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
487 "memory"); // clobber.
Ian Rogersc3ccc102014-06-25 11:52:14 -0700488#elif defined(__x86_64__) && !defined(__APPLE__)
Andreas Gampe51f76352014-05-21 08:28:48 -0700489 // Note: Uses the native convention
490 // TODO: Set the thread?
491 __asm__ __volatile__(
Andreas Gampe51f76352014-05-21 08:28:48 -0700492 "pushq %[referrer]\n\t" // Push referrer
493 "pushq (%%rsp)\n\t" // & 16B alignment padding
494 ".cfi_adjust_cfa_offset 16\n\t"
Andreas Gampe1a7e2922014-05-21 15:37:53 -0700495 "call *%%rbx\n\t" // Call the stub
Andreas Gampe51f76352014-05-21 08:28:48 -0700496 "addq $16, %%rsp\n\t" // Pop nullptr and padding
497 ".cfi_adjust_cfa_offset -16\n\t"
498 : "=a" (result)
499 // Use the result from rax
Andreas Gampe1a7e2922014-05-21 15:37:53 -0700500 : "D"(arg0), "S"(arg1), "d"(arg2), "b"(code), [referrer] "c"(referrer), [hidden] "a"(hidden)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, code into rbx,
          // referrer into rcx, and hidden into rax
        : "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }
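
  // This variant additionally threads a "hidden" argument to the stub. As the blocks above
  // show, it travels in xmm0 on x86, r12 on ARM, and x17 on ARM64, while on x86-64 it takes
  // rax and the stub address moves to rbx to make room.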

  // Method with 32b arg0, 64b arg1
  size_t Invoke3UWithReferrer(size_t arg0, uint64_t arg1, uintptr_t code, Thread* self,
                              mirror::ArtMethod* referrer) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, 0U, code, self, referrer);
#else
    // Need to split up arguments.
    uint32_t lower = static_cast<uint32_t>(arg1 & 0xFFFFFFFF);
    uint32_t upper = static_cast<uint32_t>((arg1 >> 32) & 0xFFFFFFFF);

    return Invoke3WithReferrer(arg0, lower, upper, code, self, referrer);
#endif
  }

  // Method with 32b arg0, 32b arg1, 64b arg2
  size_t Invoke3UUWithReferrer(uint32_t arg0, uint32_t arg1, uint64_t arg2, uintptr_t code,
                               Thread* self, mirror::ArtMethod* referrer) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, referrer);
#else
    // TODO: Needs 4-param invoke.
    return 0;
#endif
  }

  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
    int32_t offset;
#ifdef __LP64__
    offset = GetThreadOffset<8>(entrypoint).Int32Value();
#else
    offset = GetThreadOffset<4>(entrypoint).Int32Value();
#endif
    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
  }
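
  // GetEntrypoint reads a stub's address straight out of this thread's quick-entrypoint
  // table; the offset differs between 32-bit and 64-bit pointer sizes, hence the __LP64__
  // switch. Typical use in the tests below (illustrative sketch only; dst/src/num_bytes are
  // placeholder names):
  //
  //   uintptr_t memcpy_stub = StubTest::GetEntrypoint(self, kQuickMemcpy);
  //   Invoke3(reinterpret_cast<size_t>(dst), reinterpret_cast<size_t>(src), num_bytes,
  //           memcpy_stub, self);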

 protected:
  size_t fp_result;
};


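// Memcpy copies 10 words into the middle of a 20-word buffer. The guard words around the
// destination (indices 1-3 and 14-19) must stay different from the source, proving the stub
// neither under- nor over-copies; index 0 compares equal only because both arrays hold 0 there.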
TEST_F(StubTest, Memcpy) {
#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  uint32_t orig[20];
  uint32_t trg[20];
  for (size_t i = 0; i < 20; ++i) {
    orig[i] = i;
    trg[i] = 0;
  }

  Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
          10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);

  EXPECT_EQ(orig[0], trg[0]);

  for (size_t i = 1; i < 4; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  for (size_t i = 4; i < 14; ++i) {
    EXPECT_EQ(orig[i], trg[i]);
  }

  for (size_t i = 14; i < 20; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  // TODO: Test overlapping?

#else
  LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, LockObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();

  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
  EXPECT_EQ(lock_after.ThinLockCount(), 0U);  // Thin lock starts count at zero

  for (size_t i = 1; i < kThinLockLoops; ++i) {
    Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

    // Check we're at lock count i

    LockWord l_inc = obj->GetLockWord(false);
    LockWord::LockState l_inc_state = l_inc.GetState();
    EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
    EXPECT_EQ(l_inc.ThinLockCount(), i);
  }

  // Force a fat lock by running identity hashcode to fill up lock word.
  Handle<mirror::String> obj2(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  obj2->IdentityHashCode();

  Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after2 = obj2->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
  EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));

  // Test done.
#else
  LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


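// A small deterministic pseudo-random generator for the stress test below: a Lehmer/MINSTD-style
// step (multiplier 48271, modulus 2^31 - 1) with an extra +13 offset, so every run replays the
// same lock/unlock sequence.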
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  uint32_t next() {
    val_ = val_ * 48271 % 2147483647 + 13;
    return val_;
  }

  uint32_t val_;
};


// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();

  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
  const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init
  static constexpr size_t kNumberOfLocks = 10;  // Number of objects = number of locks.
  StackHandleScope<kNumberOfLocks + 1> hs(self);
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
  // This should be an illegal monitor state.
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after2 = obj->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);

  LockWord lock_after3 = obj->GetLockWord(false);
  LockWord::LockState new_state3 = lock_after3.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);

  // Stress test:
  // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
  // each step.
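  // Invariant maintained below: counts[index] is how often the object is currently locked. For
  // a thin lock the recursion count stored in the lock word is counts[index] - 1, because the
  // first lock leaves ThinLockCount() at zero (checked at the bottom of the loop).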

  RandGen r(0x1234);

  constexpr size_t kIterations = 10000;  // Number of iterations
  constexpr size_t kMoveToFat = 1000;    // Chance of 1:kMoveToFat to make a lock fat.

  size_t counts[kNumberOfLocks];
  bool fat[kNumberOfLocks];  // Whether a lock should be thin or fat.
  Handle<mirror::String> objects[kNumberOfLocks];

  // Initialize = allocate.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    counts[i] = 0;
    fat[i] = false;
    objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
  }

  for (size_t i = 0; i < kIterations; ++i) {
    // Select which lock to update.
    size_t index = r.next() % kNumberOfLocks;

    // Make lock fat?
    if (!fat[index] && (r.next() % kMoveToFat == 0)) {
      fat[index] = true;
      objects[index]->IdentityHashCode();

      LockWord lock_iter = objects[index]->GetLockWord(false);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (counts[index] == 0) {
        EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
      } else {
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
      }
    } else {
      bool lock;  // Whether to lock or unlock in this step.
      if (counts[index] == 0) {
        lock = true;
      } else if (counts[index] == kThinLockLoops) {
        lock = false;
      } else {
        // Randomly.
        lock = r.next() % 2 == 0;
      }

      if (lock) {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      art_quick_lock_object, self);
        counts[index]++;
      } else {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      art_quick_unlock_object, self);
        counts[index]--;
      }

      EXPECT_FALSE(self->IsExceptionPending());

      // Check the new state.
      LockWord lock_iter = objects[index]->GetLockWord(true);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (fat[index]) {
        // Abuse MonitorInfo.
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
        MonitorInfo info(objects[index].Get());
        EXPECT_EQ(counts[index], info.entry_count_) << index;
      } else {
        if (counts[index] > 0) {
          EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
          EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
        } else {
          EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
        }
      }
    }
  }

  // Unlock the remaining count times and then check it's unlocked. Then deallocate.
  // Go reverse order to correctly handle Handles.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    size_t index = kNumberOfLocks - 1 - i;
    size_t count = counts[index];
    while (count > 0) {
      test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                    art_quick_unlock_object, self);
      count--;
    }

    LockWord lock_after4 = objects[index]->GetLockWord(false);
    LockWord::LockState new_state4 = lock_after4.GetState();
    EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
                || LockWord::LockState::kFatLocked == new_state4);
  }

  // Test done.
#else
  LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, UnlockObject) {
  TestUnlockObject(this);
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_check_cast(void);
#endif
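
// Note: the CheckCast test below does not call this symbol directly; it fetches the stub's
// address from the thread's entrypoint table via GetEntrypoint() and shadows the name with a
// local constant.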

TEST_F(StubTest, CheckCast) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  const uintptr_t art_quick_check_cast = StubTest::GetEntrypoint(self, kQuickCheckCast);

  // Find some classes.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
  Handle<mirror::Class> c2(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  // TODO: Make the following work. But that would require correct managed frames.

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

#else
  LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


TEST_F(StubTest, APutObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  // Do not check non-checked ones, we'd need handlers and stuff...
  const uintptr_t art_quick_aput_obj_with_null_and_bound_check =
      StubTest::GetEntrypoint(self, kQuickAputObjectWithNullAndBoundCheck);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<5> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
  Handle<mirror::Class> ca(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  // Build a string array of size 10
  Handle<mirror::ObjectArray<mirror::Object>> array(
      hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));

  // Build a string -> should be assignable
  Handle<mirror::String> str_obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  // Build a generic object -> should fail assigning
  Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));

  // Play with it...

  // 1) Success cases
  // 1.1) Assign str_obj to array[0..3]

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(3));

  // 1.2) Assign null to array[0..3]

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(3));

  // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.

  // 2) Failure cases (str into str[])
  // 2.1) Array = null
  // TODO: Throwing NPE needs actual DEX code

//  Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
//          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
//
//  EXPECT_TRUE(self->IsExceptionPending());
//  self->ClearException();

  // 2.2) Index < 0

  Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
          reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 2.3) Index >= array length
980
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700981 Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700982 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700983
984 EXPECT_TRUE(self->IsExceptionPending());
985 self->ClearException();
986
987 // 3) Failure cases (obj into str[])
988
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700989 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700990 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700991
992 EXPECT_TRUE(self->IsExceptionPending());
993 self->ClearException();
994
995 // Tests done.
996#else
997 LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
998 // Force-print to std::cout so it's also outside the logcat.
999 std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
1000#endif
1001}
1002
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001003TEST_F(StubTest, AllocObject) {
1004 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1005
Ian Rogersc3ccc102014-06-25 11:52:14 -07001006#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001007 // TODO: Check the "Unresolved" allocation stubs
1008
1009 Thread* self = Thread::Current();
1010 // Create an object
1011 ScopedObjectAccess soa(self);
1012 // garbage is created during ClassLinker::Init
1013
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001014 StackHandleScope<2> hs(soa.Self());
1015 Handle<mirror::Class> c(
1016 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001017
1018 // Play with it...
1019
1020 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001021 {
1022 // Use an arbitrary method from c to use as referrer
1023 size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()), // type_idx
1024 reinterpret_cast<size_t>(c->GetVirtualMethod(0)), // arbitrary
1025 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001026 StubTest::GetEntrypoint(self, kQuickAllocObject),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001027 self);
1028
1029 EXPECT_FALSE(self->IsExceptionPending());
1030 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1031 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001032 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001033 VerifyObject(obj);
1034 }
1035
1036 {
1037 // We can use nullptr in the second argument as we do not need a method here (not used in
1038 // resolved/initialized cases)
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001039 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001040 StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001041 self);
1042
1043 EXPECT_FALSE(self->IsExceptionPending());
1044 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1045 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001046 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001047 VerifyObject(obj);
1048 }
1049
1050 {
1051 // We can use nullptr in the second argument as we do not need a method here (not used in
1052 // resolved/initialized cases)
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001053 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001054 StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001055 self);
1056
1057 EXPECT_FALSE(self->IsExceptionPending());
1058 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1059 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001060 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001061 VerifyObject(obj);
1062 }
1063
1064 // Failure tests.
1065
1066 // Out-of-memory.
1067 {
1068 Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);
1069
1070 // Array helps to fill memory faster.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001071 Handle<mirror::Class> ca(
1072 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
1073
1074 // Use arbitrary large amount for now.
1075 static const size_t kMaxHandles = 1000000;
Ian Rogers700a4022014-05-19 16:49:03 -07001076 std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001077
1078 std::vector<Handle<mirror::Object>> handles;
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001079 // Start allocating with 128K
1080 size_t length = 128 * KB / 4;
1081 while (length > 10) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001082 Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
1083 mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
1084 if (self->IsExceptionPending() || h.Get() == nullptr) {
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001085 self->ClearException();
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001086
1087 // Try a smaller length
1088 length = length / 8;
1089 // Use at most half the reported free space.
1090 size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
1091 if (length * 8 > mem) {
1092 length = mem / 8;
1093 }
1094 } else {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001095 handles.push_back(h);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001096 }
1097 }
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001098 LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001099
1100 // Allocate simple objects till it fails.
1101 while (!self->IsExceptionPending()) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001102 Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
1103 if (!self->IsExceptionPending() && h.Get() != nullptr) {
1104 handles.push_back(h);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001105 }
1106 }
1107 self->ClearException();
1108
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001109 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001110 StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001111 self);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001112 EXPECT_TRUE(self->IsExceptionPending());
1113 self->ClearException();
1114 EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001115 }
1116
1117 // Tests done.
1118#else
1119 LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
1120 // Force-print to std::cout so it's also outside the logcat.
1121 std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
1122#endif
1123}
1124
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001125TEST_F(StubTest, AllocObjectArray) {
1126 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1127
Ian Rogersc3ccc102014-06-25 11:52:14 -07001128#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001129 // TODO: Check the "Unresolved" allocation stubs
1130
1131 Thread* self = Thread::Current();
1132 // Create an object
1133 ScopedObjectAccess soa(self);
1134 // garbage is created during ClassLinker::Init
1135
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001136 StackHandleScope<2> hs(self);
1137 Handle<mirror::Class> c(
1138 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001139
1140 // Needed to have a linked method.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001141 Handle<mirror::Class> c_obj(
1142 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001143
1144 // Play with it...
1145
1146 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001147
1148 // For some reason this does not work, as the type_idx is artificial and outside what the
1149 // resolved types of c_obj allow...
1150
  if (false) {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0)),  // arbitrary
                            10U,
                            StubTest::GetEntrypoint(self, kQuickAllocArray),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    EXPECT_EQ(obj->GetLength(), 10);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases).
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 10U,
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory.
  {
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr),
                            GB,  // that should fail...
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
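
// A note on the out-of-memory case above (illustrative arithmetic, not original code): the
// resolved stub takes the component count in the third argument, so GB (1U << 30) requests
// about a billion object references:
//
//   size_t component_count = GB;                 // 1U << 30 elements.
//   size_t payload_bytes = component_count * 4;  // ~4GB, assuming 32-bit heap references.
//   // payload_bytes vastly exceeds the -Xmx4M heap configured in SetUpRuntimeOptions().
//
// The allocation path must reject this and throw OutOfMemoryError, which is what the
// EXPECT_TRUE(self->IsExceptionPending()) above asserts.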


TEST_F(StubTest, StringCompareTo) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))

  Thread* self = Thread::Current();

  const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);

  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  static constexpr size_t kBaseStringCount = 8;
  const char* c[kBaseStringCount] = { "", "", "a", "aa", "ab",
      "aacaacaacaacaacaac",  // This one's under the default limit to go to __memcmp16.
      "aacaacaacaacaacaacaacaacaacaacaacaac",  // This one's over.
      "aacaacaacaacaacaacaacaacaacaacaacaaca" };  // As is this one. We need a separate one to
                                                  // defeat object-equal optimizations.

  static constexpr size_t kStringCount = 2 * kBaseStringCount;

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kBaseStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
  }

  RandGen r(0x1234);

  for (size_t i = kBaseStringCount; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i - kBaseStringCount]));
    int32_t length = s[i]->GetLength();
    if (length > 1) {
      // Set a random offset and length.
      int32_t new_offset = 1 + (r.next() % (length - 1));
      int32_t rest = length - new_offset - 1;
      int32_t new_length = 1 + (rest > 0 ? r.next() % rest : 0);

      s[i]->SetField32<false>(mirror::String::CountOffset(), new_length);
      s[i]->SetField32<false>(mirror::String::OffsetOffset(), new_offset);
    }
  }
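
  // Illustration of the patching above (sketch, not executed code): after the two SetField32
  // calls the string object describes a window into its original character array. For
  // example, with length == 18, new_offset == 2 and new_length == 3:
  //
  //   // chars:   a a c a a c a a c ...
  //   // window:      ^c a a             -> "caa" is the logical value the stub must compare.
  //
  // This exercises the non-zero-offset paths of the compareto stub without real shared
  // backing arrays (see the TODO above).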

  // TODO: wide characters

  // Matrix of expectations. The first index is the first parameter. Note we only check the
  // sign, not the exact value. As we test random offsets, we compute the expected values
  // here and rely on String::CompareTo being correct.
  int32_t expected[kStringCount][kStringCount];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      expected[x][y] = s[x]->CompareTo(s[y].Get());
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      // Test string_compareto x y
      size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
                              reinterpret_cast<size_t>(s[y].Get()), 0U,
                              art_quick_string_compareto, self);

      EXPECT_FALSE(self->IsExceptionPending());

      // The result is a 32b signed integer
      union {
        size_t r;
        int32_t i;
      } conv;
      conv.r = result;
      int32_t e = expected[x][y];
      EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
      std::endl;
#endif
}
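
// The three conditional EXPECTs in StringCompareTo amount to a signum comparison; a minimal
// equivalent sketch (hypothetical helper, not part of this file):
//
//   static int32_t Signum(int32_t v) {
//     return (v > 0) - (v < 0);  // Maps any value to -1, 0 or +1.
//   }
//   // EXPECT_EQ(Signum(expected[x][y]), Signum(conv.i));
//
// Only the sign is checked because the stub is free to return any value with the right
// trichotomy, e.g. a raw character difference.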


static void GetSetBooleanStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                                mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  constexpr size_t num_values = 5;
  uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet8Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
                                           self,
                                           referrer);
    // Booleans are currently stored as uint8_t, so assert that the full byte round-trips in
    // order to catch incorrect writes or reads.
    EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
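
// Why the uint8_t cast above matters (explanatory sketch, not original code): the getter stub
// hands the field back widened into a size_t, so only the low 8 bits carry the payload:
//
//   // size_t res = 0x00000000000000FF;               // As returned for a stored 0xFF.
//   // uint8_t narrowed = static_cast<uint8_t>(res);  // 0xFF round-trips intact.
//
// Testing 2, 128 and 0xFF - not just 0 and 1 - catches stubs that canonicalize the byte or
// disturb neighboring bits in the 8-bit store.
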
static void GetSetByteStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                             mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  constexpr size_t num_values = 5;
  int8_t values[num_values] = { -128, -64, 0, 64, 127 };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet8Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetByteStatic),
                                           self,
                                           referrer);
    EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
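
// For the signed byte variant the interesting edge is extension behavior (sketch; the exact
// widening is an implementation detail of the stub): a stored -128 may come back with all
// high bits of the machine word set, so the comparison truncates first:
//
//   // size_t res = 0xFFFFFFFFFFFFFF80;                    // Possible return for -128 (64-bit).
//   // EXPECT_EQ(int8_t{-128}, static_cast<int8_t>(res));  // High bits are irrelevant.
//
// Casting through int8_t makes the check independent of whether the getter sign- or
// zero-extends the byte.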


static void GetSetBooleanInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                                  Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  constexpr size_t num_values = 5;
  uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet8Instance),
                              self,
                              referrer);

    uint8_t res = f->Get()->GetBoolean(obj->Get());
    EXPECT_EQ(values[i], res) << "Iteration " << i;

    f->Get()->SetBoolean<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint8_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
static void GetSetByteInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                               Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  constexpr size_t num_values = 5;
  int8_t values[num_values] = { -128, -64, 0, 64, 127 };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet8Instance),
                              self,
                              referrer);

    int8_t res = f->Get()->GetByte(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    f->Get()->SetByte<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetByteInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int8_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
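
// The instance helpers above cross-validate stub and runtime accessors in both directions
// (a description of the existing pattern, not new behavior):
//
//   // 1. Write via kQuickSet8Instance, read via ArtField::GetByte  -> isolates setter bugs.
//   // 2. Write via ArtField::SetByte (++res), read via the stub    -> isolates getter bugs.
//
// A failure in exactly one of the two EXPECTs points at the corresponding direction, which a
// stub-only round trip could not distinguish.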

static void GetSetCharStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                             mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  constexpr size_t num_values = 6;
  uint16_t values[num_values] = { 0, 1, 2, 255, 32768, 0xFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet16Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetCharStatic),
                                           self,
                                           referrer);

    EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
static void GetSetShortStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                              mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  constexpr size_t num_values = 6;
  int16_t values[num_values] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet16Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetShortStatic),
                                           self,
                                           referrer);

    EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

static void GetSetCharInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                               Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  constexpr size_t num_values = 6;
  uint16_t values[num_values] = { 0, 1, 2, 255, 32768, 0xFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    uint16_t res = f->Get()->GetChar(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    f->Get()->SetChar<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetCharInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint16_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
static void GetSetShortInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                                Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  constexpr size_t num_values = 6;
  int16_t values[num_values] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    int16_t res = f->Get()->GetShort(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    f->Get()->SetShort<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetShortInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int16_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

static void GetSet32Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                           mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  constexpr size_t num_values = 7;
  uint32_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet32Static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet32Static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


static void GetSet32Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  constexpr size_t num_values = 7;
  uint32_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet32Instance),
                              self,
                              referrer);

    int32_t res = f->Get()->GetInt(obj->Get());
    EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;

    res++;
    f->Get()->SetInt<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGet32Instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int32_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))

static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
                                 mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                            reinterpret_cast<size_t>(val),
                            0U,
                            StubTest::GetEntrypoint(self, kQuickSetObjStatic),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                                         0U, 0U,
                                         StubTest::GetEntrypoint(self, kQuickGetObjStatic),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
}
#endif
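
// Note (assumed rationale, consistent with the guards used throughout this file):
// set_and_check_static() sits under the same ISA #if as its sole caller. On unsupported
// architectures the caller compiles down to just the skip-logging branch, and an
// unconditionally defined helper would be flagged by -Wunused-function:
//
//   #if defined(...)           // Helper and caller appear or disappear together.
//   static void helper(...);
//   #endif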

static void GetSetObjStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                            mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_static((*f)->GetDexFieldIndex(), str, self, referrer, test);

  set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);
#else
  LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
static void set_and_check_instance(Handle<mirror::ArtField>* f, mirror::Object* trg,
                                   mirror::Object* val, Thread* self, mirror::ArtMethod* referrer,
                                   StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                            reinterpret_cast<size_t>(trg),
                            reinterpret_cast<size_t>(val),
                            StubTest::GetEntrypoint(self, kQuickSetObjInstance),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                         reinterpret_cast<size_t>(trg),
                                         0U,
                                         StubTest::GetEntrypoint(self, kQuickGetObjInstance),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;

  EXPECT_EQ(val, f->Get()->GetObj(trg));
}
#endif

static void GetSetObjInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                              Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_instance(f, obj->Get(), str, self, referrer, test);

  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
#else
  LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


// TODO: Complete these tests for 32b architectures.

static void GetSet64Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                           mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
  constexpr size_t num_values = 8;
  uint64_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3UWithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                               values[i],
                               StubTest::GetEntrypoint(self, kQuickSet64Static),
                               self,
                               referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet64Static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


static void GetSet64Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
  constexpr size_t num_values = 8;
  uint64_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet64Instance),
                              self,
                              referrer);

    int64_t res = f->Get()->GetLong(obj->Get());
    EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;

    res++;
    f->Get()->SetLong<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGet64Instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int64_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
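
// Why the 64-bit helpers are gated to 64-bit ISAs (see the TODO above; a sketch of the
// constraint, not original code): Invoke3WithReferrer() carries arguments as size_t, so on a
// 32-bit target the cast used above would silently drop the upper half of the value:
//
//   // uint64_t v = 0xFFFFFFFFFFFF;              // 48 significant bits.
//   // size_t arg = static_cast<size_t>(v);      // On 32-bit: 0xFFFFFFFF - upper bits lost.
//
// Completing these tests for 32b architectures means marshalling the value the way the
// managed ABI does, e.g. in a register pair.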

static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
  // garbage is created during ClassLinker::Init

  JNIEnv* env = Thread::Current()->GetJniEnv();
  jclass jc = env->FindClass("AllFields");
  CHECK(jc != nullptr);
  jobject o = env->AllocObject(jc);
  CHECK(o != nullptr);

  ScopedObjectAccess soa(self);
  StackHandleScope<5> hs(self);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
  Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
  // Need a method as a referrer
  Handle<mirror::ArtMethod> m(hs.NewHandle(c->GetDirectMethod(0)));

  // Play with it...

  // Static fields.
  {
    Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetSFields()));
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      StackHandleScope<1> hs(self);
      Handle<mirror::ArtField> f(hs.NewHandle(fields->Get(i)));

      Primitive::Type type = f->GetTypeAsPrimitiveType();
      switch (type) {
        case Primitive::Type::kPrimBoolean:
          if (test_type == type) {
            GetSetBooleanStatic(&obj, &f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimByte:
          if (test_type == type) {
            GetSetByteStatic(&obj, &f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimChar:
          if (test_type == type) {
            GetSetCharStatic(&obj, &f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimShort:
          if (test_type == type) {
            GetSetShortStatic(&obj, &f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Static(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Static(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Don't try array.
          if (test_type == type && f->GetTypeDescriptor()[0] != '[') {
            GetSetObjStatic(&obj, &f, self, m.Get(), test);
          }
          break;

        default:
          break;  // Skip.
      }
    }
  }

  // Instance fields.
  {
    Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetIFields()));
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      StackHandleScope<1> hs(self);
      Handle<mirror::ArtField> f(hs.NewHandle(fields->Get(i)));

      Primitive::Type type = f->GetTypeAsPrimitiveType();
      switch (type) {
        case Primitive::Type::kPrimBoolean:
          if (test_type == type) {
            GetSetBooleanInstance(&obj, &f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimByte:
          if (test_type == type) {
            GetSetByteInstance(&obj, &f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimChar:
          if (test_type == type) {
            GetSetCharInstance(&obj, &f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimShort:
          if (test_type == type) {
            GetSetShortInstance(&obj, &f, self, m.Get(), test);
          }
          break;
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Instance(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Instance(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Don't try array.
          if (test_type == type && f->GetTypeDescriptor()[0] != '[') {
            GetSetObjInstance(&obj, &f, self, m.Get(), test);
          }
          break;

        default:
          break;  // Skip.
      }
    }
  }

  // TODO: Deallocate things.
}
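
// A note on the referrer pattern in TestFields (explanatory): the field stubs receive a dex
// field index, not an ArtField*, and resolve it through the referrer method's dex cache.
// Any method declared by AllFields itself works, hence c->GetDirectMethod(0):
//
//   // Handle<mirror::ArtMethod> m(hs.NewHandle(c->GetDirectMethod(0)));
//   // test->Invoke3WithReferrer(f->GetDexFieldIndex(), ..., self, m.Get());
//
// A referrer from an unrelated dex file would make the index meaningless and resolution fail.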

TEST_F(StubTest, Fields8) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimBoolean);
  TestFields(self, this, Primitive::Type::kPrimByte);
}

TEST_F(StubTest, Fields16) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimChar);
  TestFields(self, this, Primitive::Type::kPrimShort);
}

TEST_F(StubTest, Fields32) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimInt);
}

TEST_F(StubTest, FieldsObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimNot);
}

TEST_F(StubTest, Fields64) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimLong);
}

TEST_F(StubTest, IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod = env->GetMethodID(arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get mirror representation.
  Handle<mirror::ArtMethod> contains_amethod(hs.NewHandle(soa.DecodeMethod(contains_jmethod)));

  // Patch up ArrayList.contains.
  if (contains_amethod.Get()->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod.Get()->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
  }

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod = env->GetMethodID(list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get mirror representation.
  Handle<mirror::ArtMethod> inf_contains(hs.NewHandle(soa.DecodeMethod(inf_contains_jmethod)));

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Create instances.

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object*>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(jobj)));

  // Invocation tests.

  // 1. imt_conflict

  // Contains.

  size_t result =
      Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self, contains_amethod.Get(),
                                   static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));

  // Contains.

  result = Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                        reinterpret_cast<size_t>(obj.Get()),
                                        StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                        self, contains_amethod.Get(),
                                        static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // 2. regular interface trampoline

  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(obj.Get()),
                               StubTest::GetEntrypoint(self,
                                   kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod.Get());

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               StubTest::GetEntrypoint(self,
                                   kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod.Get());

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
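
// How the imt_conflict invocations above are wired (descriptive sketch): the conflict
// trampoline gets the interface method's dex method index as an extra "hidden" argument,
// supplied here by Invoke3WithReferrerAndHidden():
//
//   // arg0   = 0 (unused)            arg1 = receiver (the ArrayList)
//   // arg2   = the Object parameter  referrer = ArrayList.contains
//   // hidden = dex index of List.contains
//
// The hidden index tells the trampoline which interface method was actually meant when
// several methods collide in one IMT slot.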

TEST_F(StubTest, StringIndexOf) {
#if defined(__arm__) || defined(__aarch64__)
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // TODO: Shared backing arrays.
  static constexpr size_t kStringCount = 7;
  const char* c_str[kStringCount] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
  static constexpr size_t kCharCount = 5;
  const char c_char[kCharCount] = { 'a', 'b', 'c', 'd', 'e' };

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
  }

  // Matrix of expectations. The first index is the string, the second the character, and the
  // third the start position (offset by one so that -1 is covered). We compute the expected
  // values with String::FastIndexOf and rely on it being correct.
  static constexpr size_t kMaxLen = 9;
  DCHECK_LE(strlen(c_str[kStringCount-1]), kMaxLen) << "Please fix the indexof test.";

  // Last dimension: start, offset by 1.
  int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
      }
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        int32_t start = static_cast<int32_t>(z) - 1;

        // Test indexof(x, y, start)
        size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
                                StubTest::GetEntrypoint(self, kQuickIndexOf), self);

        EXPECT_FALSE(self->IsExceptionPending());

        // The result is a 32b signed integer
        union {
          size_t r;
          int32_t i;
        } conv;
        conv.r = result;

        EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
            c_char[y] << " @ " << start;
      }
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
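
// The start values probed above run from -1 to one past the longest string; the intended
// edge-case semantics (using String::FastIndexOf as the oracle) look like:
//
//   // s = "cba", searching for 'a':
//   // FastIndexOf('a', -1) == 2   // Negative start is clamped to 0.
//   // FastIndexOf('a',  2) == 2
//   // FastIndexOf('a',  3) == -1  // Start past the end finds nothing.
//
// The stub must agree with the runtime implementation on these clamping cases, not just on
// in-range starts.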

}  // namespace art