/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "common_runtime_test.h"
#include "mirror/art_field-inl.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/string-inl.h"

#include <cstdio>

namespace art {

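// Test harness for the quick entrypoint stubs: it sets up the callee-save methods
// the stubs rely on and provides Invoke3* helpers that call a stub through
// per-architecture inline assembly.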
class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(type), type);
        }
      }
    }
  }

  void SetUpRuntimeOptions(Runtime::Options *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
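    // Force interpreter-only mode; the tests call the stubs directly, so we do not
    // want compiled versions of methods getting in the way.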
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

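  // Invokes 'code' with arg0-arg2 plus the current Thread, pushing 'referrer' where
  // the quick stubs expect the calling ArtMethod*. Each architecture branch below
  // marshals the arguments into the stub's registers and returns the stub's result.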
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             mirror::ArtMethod* referrer) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushl %[referrer]\n\t"     // Store referrer
        "call *%%edi\n\t"           // Call the stub
        "addl $4, %%esp"            // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : );  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\t"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #20\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "add sp, sp, #20\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop referrer and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : );  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"    // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "str %[self], [sp, #32]\n\t"

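        // d8-d15 are callee-saved in the AAPCS64 ABI; seed them with a known bit
        // pattern so the checks after the call can detect clobbering by the stub.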
        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldr x18, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"              // Call the stub
        "mov x8, x0\n\t"          // Store result
        "add sp, sp, #16\n\t"     // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"          // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"    // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"     // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t"  // Store the FPR comparison result
        "mov %[result], x8\n\t"      // Store the call result

        "b 3f\n\t"                // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from x0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31");  // clobber.
#elif defined(__x86_64__)
    // Note: Uses the native convention
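    // (arg0/arg1/arg2 go to RDI, RSI and RDX and the code pointer to RAX via the
    // constraints below, so no extra marshalling is needed.)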
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"              // Call the stub
        "addq $16, %%rsp\n\t"          // Pop referrer and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "m"(referrer)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbx", "rcx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

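  // Like Invoke3WithReferrer, but additionally passes a "hidden" argument in the
  // location the trampolines expect it: xmm0 on x86 and x86-64, r12 on arm, and
  // x12 on arm64.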
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, mirror::ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "movd %[hidden], %%xmm0\n\t"
        "pushl %[referrer]\n\t"     // Store referrer
        "call *%%edi\n\t"           // Call the stub
        "addl $4, %%esp"            // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"m"(referrer), [hidden]"r"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : );  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\t"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop referrer and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : );  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"    // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

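        // As above: seed the callee-saved d8-d15 with a known pattern so that
        // clobbering by the stub can be detected after the call.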
        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x18, x12, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"              // Call the stub
        "mov x8, x0\n\t"          // Store result
        "add sp, sp, #16\n\t"     // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"          // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"    // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"     // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t"  // Store the FPR comparison result
        "mov %[result], x8\n\t"      // Store the call result

        "b 3f\n\t"                // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from x0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31");  // clobber.
#elif defined(__x86_64__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "movq %[hidden], %%r9\n\t"     // No need to save r9, listed as clobbered
        "movd %%r9, %%xmm0\n\t"
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"              // Call the stub
        "addq $16, %%rsp\n\t"          // Pop referrer and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "m"(referrer), [hidden] "m"(hidden)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbx", "rcx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Method with 32b arg0, 64b arg1
  size_t Invoke3UWithReferrer(size_t arg0, uint64_t arg1, uintptr_t code, Thread* self,
                              mirror::ArtMethod* referrer) {
#if defined(__x86_64__) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, 0U, code, self, referrer);
#else
    // Need to split up arguments.
    uint32_t lower = static_cast<uint32_t>(arg1 & 0xFFFFFFFF);
    uint32_t upper = static_cast<uint32_t>((arg1 >> 32) & 0xFFFFFFFF);

    return Invoke3WithReferrer(arg0, lower, upper, code, self, referrer);
#endif
  }

  // Method with 32b arg0, 32b arg1, 64b arg2
  size_t Invoke3UUWithReferrer(uint32_t arg0, uint32_t arg1, uint64_t arg2, uintptr_t code,
                               Thread* self, mirror::ArtMethod* referrer) {
#if defined(__x86_64__) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, referrer);
#else
    // TODO: Needs 4-param invoke.
    return 0;
#endif
  }

 protected:
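  // Set by the Invoke3* helpers: non-zero if the called stub clobbered one of the
  // callee-saved FP registers checked above (aarch64 only; zero elsewhere).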
  size_t fp_result;
};


#if defined(__i386__) || defined(__x86_64__)
extern "C" void art_quick_memcpy(void);
#endif

TEST_F(StubTest, Memcpy) {
#if defined(__i386__) || defined(__x86_64__)
  Thread* self = Thread::Current();

  uint32_t orig[20];
  uint32_t trg[20];
  for (size_t i = 0; i < 20; ++i) {
    orig[i] = i;
    trg[i] = 0;
  }

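  // Copy 10 words starting at index 4; entries outside [4, 14) must stay untouched.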
  Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
          10 * sizeof(uint32_t), reinterpret_cast<uintptr_t>(&art_quick_memcpy), self);

  EXPECT_EQ(orig[0], trg[0]);

  for (size_t i = 1; i < 4; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  for (size_t i = 4; i < 14; ++i) {
    EXPECT_EQ(orig[i], trg[i]);
  }

  for (size_t i = 14; i < 20; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  // TODO: Test overlapping?

#else
  LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_lock_object(void);
#endif

TEST_F(StubTest, LockObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
          reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
  EXPECT_EQ(lock_after.ThinLockCount(), 0U);  // Thin lock starts count at zero

  for (size_t i = 1; i < kThinLockLoops; ++i) {
    Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
            reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

    // Check we're at lock count i

    LockWord l_inc = obj->GetLockWord(false);
    LockWord::LockState l_inc_state = l_inc.GetState();
    EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
    EXPECT_EQ(l_inc.ThinLockCount(), i);
  }

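  // An identity hash code occupies the lock word, so the next lock acquisition
  // cannot stay thin and must inflate to a full monitor.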
  // Force a fat lock by running identity hashcode to fill up lock word.
  Handle<mirror::String> obj2(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  obj2->IdentityHashCode();

  Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U,
          reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after2 = obj2->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
  EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));

  // Test done.
#else
  LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

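// Deterministic pseudo-random generator (MINSTD-style multiplier) so stress runs
// are reproducible.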
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  uint32_t next() {
    val_ = val_ * 48271 % 2147483647 + 13;
    return val_;
  }

  uint32_t val_;
};

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_lock_object(void);
extern "C" void art_quick_unlock_object(void);
#endif

// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init
  static constexpr size_t kNumberOfLocks = 10;  // Number of objects, one lock each.
  StackHandleScope<kNumberOfLocks + 1> hs(self);
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
  // This should be an illegal monitor state.
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after2 = obj->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);

  LockWord lock_after3 = obj->GetLockWord(false);
  LockWord::LockState new_state3 = lock_after3.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);

  // Stress test:
  // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
  // each step.

  RandGen r(0x1234);

  constexpr size_t kIterations = 10000;  // Number of iterations
  constexpr size_t kMoveToFat = 1000;    // Chance of 1:kMoveToFat to make a lock fat.

  size_t counts[kNumberOfLocks];
  bool fat[kNumberOfLocks];  // Whether a lock should be thin or fat.
  Handle<mirror::String> objects[kNumberOfLocks];

  // Initialize = allocate.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    counts[i] = 0;
    fat[i] = false;
    objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
  }

  for (size_t i = 0; i < kIterations; ++i) {
    // Select which lock to update.
    size_t index = r.next() % kNumberOfLocks;

    // Make lock fat?
    if (!fat[index] && (r.next() % kMoveToFat == 0)) {
      fat[index] = true;
      objects[index]->IdentityHashCode();

      LockWord lock_iter = objects[index]->GetLockWord(false);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (counts[index] == 0) {
        EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
      } else {
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
      }
    } else {
      bool lock;  // Whether to lock or unlock in this step.
      if (counts[index] == 0) {
        lock = true;
      } else if (counts[index] == kThinLockLoops) {
        lock = false;
      } else {
        // Randomly.
        lock = r.next() % 2 == 0;
      }

      if (lock) {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);
        counts[index]++;
      } else {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
        counts[index]--;
      }

      EXPECT_FALSE(self->IsExceptionPending());

      // Check the new state.
      LockWord lock_iter = objects[index]->GetLockWord(true);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (fat[index]) {
        // Abuse MonitorInfo.
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
        MonitorInfo info(objects[index].Get());
        EXPECT_EQ(counts[index], info.entry_count_) << index;
      } else {
        if (counts[index] > 0) {
          EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
          EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
        } else {
          EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
        }
      }
    }
  }

  // Unlock the remaining count times and then check it's unlocked. Then deallocate.
  // Go reverse order to correctly handle Handles.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    size_t index = kNumberOfLocks - 1 - i;
    size_t count = counts[index];
    while (count > 0) {
      test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                    reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
      count--;
    }

    LockWord lock_after4 = objects[index]->GetLockWord(false);
    LockWord::LockState new_state4 = lock_after4.GetState();
    EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
                || LockWord::LockState::kFatLocked == new_state4);
  }

  // Test done.
#else
  LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, UnlockObject) {
  TestUnlockObject(this);
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_check_cast(void);
#endif

TEST_F(StubTest, CheckCast) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  Thread* self = Thread::Current();
  // Find some classes.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
  Handle<mirror::Class> c2(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  EXPECT_FALSE(self->IsExceptionPending());

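  // String[] is assignable to Object[], so the next three checks must not throw.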
  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

  // TODO: Make the following work. But that would require correct managed frames.

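  // The reverse direction (treating Object[] as String[]) must fail the check and
  // leave an exception pending.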
  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

#else
  LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_aput_obj_with_null_and_bound_check(void);
// Do not check non-checked ones, we'd need handlers and stuff...
#endif

TEST_F(StubTest, APutObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<5> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
  Handle<mirror::Class> ca(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  // Build a string array of size 10
  Handle<mirror::ObjectArray<mirror::Object>> array(
      hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));

  // Build a string -> should be assignable
  Handle<mirror::String> str_obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  // Build a generic object -> should fail assigning
  Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));

  // Play with it...

  // 1) Success cases
  // 1.1) Assign str_obj to array[0..3]

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(3));

  // 1.2) Assign null to array[0..3]

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(3));

  // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.

  // 2) Failure cases (str into str[])
  // 2.1) Array = null
  // TODO: Throwing NPE needs actual DEX code

//  Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
//          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
//
//  EXPECT_TRUE(self->IsExceptionPending());
//  self->ClearException();

  // 2.2) Index < 0

  Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
          reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 2.3) Index too large (>= array length)

  Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 3) Failure cases (obj into str[])

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // Tests done.
#else
  LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, AllocObject) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
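  // Exercise the three allocation entrypoints: pAllocObject resolves the type index
  // through the referrer method, while the Resolved and Initialized variants take a
  // Class pointer directly.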
  {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            reinterpret_cast<size_t>(c->GetVirtualMethod(0)),  // arbitrary
                            0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObject),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
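  // Strategy: shrink the ideal footprint, fill the heap with arrays and then single
  // objects, and check that the allocation stub finally reports a pending exception.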
  {
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K
    size_t length = 128 * KB / 4;
    while (length > 10) {
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h.Get() == nullptr) {
        self->ClearException();

        // Try a smaller length
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h.Get() != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, AllocObjectArray) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(self);
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

  // Needed to have a linked method.
  Handle<mirror::Class> c_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());

  // For some reason this does not work, as the type_idx is artificial and outside what the
  // resolved types of c_obj allow...

  if (false) {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0)),  // arbitrary
                            10U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArray),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    EXPECT_EQ(obj->GetLength(), 10);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 10U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArrayResolved),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory.
  {
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr),
                            GB,  // that should fail...
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArrayResolved),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_string_compareto(void);
#endif

TEST_F(StubTest, StringCompareTo) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
1211 Thread* self = Thread::Current();
1212 ScopedObjectAccess soa(self);
1213 // garbage is created during ClassLinker::Init
1214
1215 // Create some strings
1216 // Use array so we can index into it and use a matrix for expected results
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001217 // Setup: The first half is standard. The second half uses a non-zero offset.
1218 // TODO: Shared backing arrays.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001219 static constexpr size_t kBaseStringCount = 7;
1220 const char* c[kBaseStringCount] = { "", "", "a", "aa", "ab", "aac", "aac" , };
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001221
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001222 static constexpr size_t kStringCount = 2 * kBaseStringCount;
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001223
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001224 StackHandleScope<kStringCount> hs(self);
1225 Handle<mirror::String> s[kStringCount];
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001226
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001227 for (size_t i = 0; i < kBaseStringCount; ++i) {
1228 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001229 }

  RandGen r(0x1234);

  for (size_t i = kBaseStringCount; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i - kBaseStringCount]));
    int32_t length = s[i]->GetLength();
    if (length > 1) {
      // Set a random offset and length.
      int32_t new_offset = 1 + (r.next() % (length - 1));
      int32_t rest = length - new_offset - 1;
      int32_t new_length = 1 + (rest > 0 ? r.next() % rest : 0);

      s[i]->SetField32<false>(mirror::String::CountOffset(), new_length);
      s[i]->SetField32<false>(mirror::String::OffsetOffset(), new_offset);
    }
  }
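
  // Worked example of the trick above: for a string created from "aac" (length 3), new_offset
  // comes out as 1 or 2 and new_length as 1, so the string becomes a one-character view ("a" or
  // "c") into the middle of its original backing char array.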

  // TODO: Wide characters.

  // Matrix of expectations. The first component is the first parameter. Note that we only check
  // against the sign, not the value. As we are testing random offsets, we need to compute the
  // expectations here and rely on String::CompareTo being correct.
  int32_t expected[kStringCount][kStringCount];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      expected[x][y] = s[x]->CompareTo(s[y].Get());
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      // Test string_compareto x y.
      size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
                              reinterpret_cast<size_t>(s[y].Get()), 0U,
                              reinterpret_cast<uintptr_t>(&art_quick_string_compareto), self);

      EXPECT_FALSE(self->IsExceptionPending());

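      // Invoke3 funnels the return value back as a size_t, so on a 64-bit target the upper bits
      // hold whatever the ABI left there. The union below reinterprets just the low 32 bits as
      // the signed integer the stub produced (all targets tested here are little-endian).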
      // The result is a 32b signed integer.
      union {
        size_t r;
        int32_t i;
      } conv;
      conv.r = result;
      int32_t e = expected[x][y];
      // Index c[] modulo kBaseStringCount when printing: c only holds the base strings, while
      // x and y also cover the offset copies in the second half of s[].
      EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x % kBaseStringCount] << " y="
          << c[y % kBaseStringCount] << " res=" << conv.r;
      EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x % kBaseStringCount] << " y="
          << c[y % kBaseStringCount] << " res=" << conv.r;
      EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x % kBaseStringCount] << " y="
          << c[y % kBaseStringCount] << " res=" << conv.r;
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_set32_static(void);
extern "C" void art_quick_get32_static(void);
#endif

static void GetSet32Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                           mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  constexpr size_t num_values = 7;
  uint32_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

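  // Each iteration writes values[i] through the set32 stub and reads it back through the get32
  // stub; the referrer method passed along is what lets the stubs resolve the raw dex field
  // index against the right dex cache.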
  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              reinterpret_cast<uintptr_t>(&art_quick_set32_static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           reinterpret_cast<uintptr_t>(&art_quick_get32_static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_set32_instance(void);
extern "C" void art_quick_get32_instance(void);
#endif

static void GetSet32Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  constexpr size_t num_values = 7;
  uint32_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

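  // Unlike the static variant, this also cross-checks against the runtime's own accessors: a
  // stub write is verified with ArtField::GetInt, and after bumping the value via
  // ArtField::SetInt the stub read must observe the new value.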
  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              reinterpret_cast<uintptr_t>(&art_quick_set32_instance),
                              self,
                              referrer);

    int32_t res = f->Get()->GetInt(obj->Get());
    EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;

    res++;
    f->Get()->SetInt<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            reinterpret_cast<uintptr_t>(&art_quick_get32_instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int32_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_set_obj_static(void);
extern "C" void art_quick_get_obj_static(void);

static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
                                 mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                            reinterpret_cast<size_t>(val),
                            0U,
                            reinterpret_cast<uintptr_t>(&art_quick_set_obj_static),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                                         0U, 0U,
                                         reinterpret_cast<uintptr_t>(&art_quick_get_obj_static),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
}
#endif

static void GetSetObjStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                            mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
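  // Cycle the field through nullptr -> string -> nullptr so the stub is exercised both when
  // storing a real reference (which presumably takes the write-barrier path) and when clearing
  // the field again.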
  set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_static((*f)->GetDexFieldIndex(), str, self, referrer, test);

  set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);
#else
  LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_set_obj_instance(void);
extern "C" void art_quick_get_obj_instance(void);

static void set_and_check_instance(Handle<mirror::ArtField>* f, mirror::Object* trg,
                                   mirror::Object* val, Thread* self, mirror::ArtMethod* referrer,
                                   StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                            reinterpret_cast<size_t>(trg),
                            reinterpret_cast<size_t>(val),
                            reinterpret_cast<uintptr_t>(&art_quick_set_obj_instance),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                         reinterpret_cast<size_t>(trg),
                                         0U,
                                         reinterpret_cast<uintptr_t>(&art_quick_get_obj_instance),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;

  EXPECT_EQ(val, f->Get()->GetObj(trg));
}
#endif

static void GetSetObjInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                              Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_instance(f, obj->Get(), str, self, referrer, test);

  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
#else
  LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}


// TODO: Complete these tests for 32b architectures.

#if defined(__x86_64__) || defined(__aarch64__)
extern "C" void art_quick_set64_static(void);
extern "C" void art_quick_get64_static(void);
#endif

static void GetSet64Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                           mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__x86_64__) || defined(__aarch64__)
  constexpr size_t num_values = 8;
  uint64_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

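  // The 0xFFFFFFFFFFFF entry matters most here: it has bits set above bit 31, so the EXPECT_EQ
  // below would catch a stub (or return path) that only moves 32 bits.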
  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3UWithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                               values[i],
                               reinterpret_cast<uintptr_t>(&art_quick_set64_static),
                               self,
                               referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           reinterpret_cast<uintptr_t>(&art_quick_get64_static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}


#if defined(__x86_64__) || defined(__aarch64__)
extern "C" void art_quick_set64_instance(void);
extern "C" void art_quick_get64_instance(void);
#endif

static void GetSet64Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__x86_64__) || defined(__aarch64__)
  constexpr size_t num_values = 8;
  uint64_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              reinterpret_cast<uintptr_t>(&art_quick_set64_instance),
                              self,
                              referrer);

    int64_t res = f->Get()->GetLong(obj->Get());
    EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;

    res++;
    f->Get()->SetLong<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            reinterpret_cast<uintptr_t>(&art_quick_get64_instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int64_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA
            << std::endl;
#endif
}

static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
  // garbage is created during ClassLinker::Init

  JNIEnv* env = Thread::Current()->GetJniEnv();
  jclass jc = env->FindClass("AllFields");
  CHECK(jc != nullptr);
  jobject o = env->AllocObject(jc);
  CHECK(o != nullptr);

  ScopedObjectAccess soa(self);
  StackHandleScope<5> hs(self);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
  Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
  // Need a method as a referrer.
  Handle<mirror::ArtMethod> m(hs.NewHandle(c->GetDirectMethod(0)));

  // Play with it...

  // Static fields.
  {
    Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetSFields()));
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      StackHandleScope<1> hs(self);
      Handle<mirror::ArtField> f(hs.NewHandle(fields->Get(i)));

      FieldHelper fh(f.Get());
      Primitive::Type type = fh.GetTypeAsPrimitiveType();
      switch (type) {
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Static(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Static(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Don't try array.
          if (test_type == type && fh.GetTypeDescriptor()[0] != '[') {
            GetSetObjStatic(&obj, &f, self, m.Get(), test);
          }
          break;

        default:
          break;  // Skip.
      }
    }
  }

  // Instance fields.
  {
    Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetIFields()));
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      StackHandleScope<1> hs(self);
      Handle<mirror::ArtField> f(hs.NewHandle(fields->Get(i)));

      FieldHelper fh(f.Get());
      Primitive::Type type = fh.GetTypeAsPrimitiveType();
      switch (type) {
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Instance(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Instance(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Don't try array.
          if (test_type == type && fh.GetTypeDescriptor()[0] != '[') {
            GetSetObjInstance(&obj, &f, self, m.Get(), test);
          }
          break;

        default:
          break;  // Skip.
      }
    }
  }

  // TODO: Deallocate things.
}


TEST_F(StubTest, Fields32) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimInt);
}

TEST_F(StubTest, FieldsObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimNot);
}

TEST_F(StubTest, Fields64) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimLong);
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_imt_conflict_trampoline(void);
#endif

TEST_F(StubTest, IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod =
      env->GetMethodID(arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get mirror representation.
  Handle<mirror::ArtMethod> contains_amethod(hs.NewHandle(soa.DecodeMethod(contains_jmethod)));

  // Patch up ArrayList.contains.
  if (contains_amethod.Get()->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod.Get()->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        GetTlsPtr(self)->quick_entrypoints.pQuickToInterpreterBridge));
  }
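  // The patch above presumably matters because this suite runs interpreted (-Xint is added in
  // SetUpRuntimeOptions), so contains may have no compiled code; pointing it at the
  // quick-to-interpreter bridge gives the IMT dispatch a valid entry point to jump to.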

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod =
      env->GetMethodID(list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get mirror representation.
  Handle<mirror::ArtMethod> inf_contains(hs.NewHandle(soa.DecodeMethod(inf_contains_jmethod)));

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Sanity check: check that there is a conflict for List.contains in ArrayList.

  mirror::Class* arraylist_class = soa.Decode<mirror::Class*>(arraylist_jclass);
  mirror::ArtMethod* m = arraylist_class->GetImTable()->Get(
      inf_contains->GetDexMethodIndex() % ClassLinker::kImtSize);
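  // An interface method is assigned IMT slot (dex method index % kImtSize). When several
  // interface methods of a class hash to the same slot, the class linker installs the special
  // IMT conflict method there, and the conflict trampoline has to find the real target at call
  // time; that trampoline is exactly what this test drives.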

  if (!m->IsImtConflictMethod()) {
    LOG(WARNING) << "Test is meaningless, no IMT conflict in setup: "
                 << PrettyMethod(m, true);
    LOG(WARNING) << "Please update StubTest.IMT.";
    return;
  }

  // Create instances.

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object*>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(jobj)));

  // Invoke.

  size_t result =
      Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   reinterpret_cast<uintptr_t>(&art_quick_imt_conflict_trampoline),
                                   self, contains_amethod.Get(),
                                   static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
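  // The fresh ArrayList does not contain obj yet, hence JNI_FALSE; after the add below the same
  // dispatch must yield JNI_TRUE. The hidden argument carries List.contains' dex method index,
  // which the conflict trampoline needs in order to locate the actual implementation.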

  // Add object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));

  // Invoke again.

  result = Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                        reinterpret_cast<size_t>(obj.Get()),
                                        reinterpret_cast<uintptr_t>(&art_quick_imt_conflict_trampoline),
                                        self, contains_amethod.Get(),
                                        static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

}  // namespace art