/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "common_runtime_test.h"
#include "mirror/art_field-inl.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/string-inl.h"

#include <cstdio>

namespace art {


class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(type), type);
        }
      }
    }
  }

  void SetUpRuntimeOptions(Runtime::Options *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             mirror::ArtMethod* referrer) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushl %[referrer]\n\t"     // Store referrer
        "call *%%edi\n\t"           // Call the stub
        "addl $4, %%esp"            // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : );  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
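    // Note (sketch, see the TODO above): pushing the referrer emulates the
    // stack slot a managed caller would occupy, so stubs that look up the
    // calling method (e.g. for access checks) find a plausible value there.
    // It is only an approximation of a managed frame, not a complete one.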
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\t"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #20\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "add sp, sp, #20\n\t"

        "blx r3\n\t"                // Call the stub
116 "add sp, sp, #12\n\t" // Pop nullptr and padding
117 ".cfi_adjust_cfa_offset -12\n\t"
118 "pop {r1-r12, lr}\n\t" // Restore state
119 ".cfi_adjust_cfa_offset -52\n\t"
120 "mov %[result], r0\n\t" // Save the result
121 : [result] "=r" (result)
122 // Use the result from r0
123 : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
124 [referrer] "r"(referrer)
125 : ); // clobber.
126#elif defined(__aarch64__)
127 __asm__ __volatile__(
Andreas Gampe6cf80102014-05-19 11:32:41 -0700128 // Spill space for d8 - d15
129 "sub sp, sp, #64\n\t"
130 ".cfi_adjust_cfa_offset 64\n\t"
131 "stp d8, d9, [sp]\n\t"
132 "stp d10, d11, [sp, #16]\n\t"
133 "stp d12, d13, [sp, #32]\n\t"
134 "stp d14, d15, [sp, #48]\n\t"
135
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700136 "sub sp, sp, #48\n\t" // Reserve stack space, 16B aligned
137 ".cfi_adjust_cfa_offset 48\n\t"
138 "stp %[referrer], x1, [sp]\n\t"// referrer, x1
139 "stp x2, x3, [sp, #16]\n\t" // Save x2, x3
140 "stp x18, x30, [sp, #32]\n\t" // Save x18(xSELF), xLR
141
142 // Push everything on the stack, so we don't rely on the order. What a mess. :-(
143 "sub sp, sp, #48\n\t"
Andreas Gampe6cf80102014-05-19 11:32:41 -0700144 ".cfi_adjust_cfa_offset 48\n\t"
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700145 "str %[arg0], [sp]\n\t"
146 "str %[arg1], [sp, #8]\n\t"
147 "str %[arg2], [sp, #16]\n\t"
148 "str %[code], [sp, #24]\n\t"
149 "str %[self], [sp, #32]\n\t"
Andreas Gampe6cf80102014-05-19 11:32:41 -0700150
151 // Now we definitely have x0-x3 free, use it to garble d8 - d15
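        // (d8 - d15 are callee-saved under the AArch64 procedure call
        //  standard, so a correct stub must preserve them; we write a known
        //  pattern into them here and verify it again after the call.)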
152 "movk x0, #0xfad0\n\t"
153 "movk x0, #0xebad, lsl #16\n\t"
154 "movk x0, #0xfad0, lsl #32\n\t"
155 "movk x0, #0xebad, lsl #48\n\t"
156 "fmov d8, x0\n\t"
157 "add x0, x0, 1\n\t"
158 "fmov d9, x0\n\t"
159 "add x0, x0, 1\n\t"
160 "fmov d10, x0\n\t"
161 "add x0, x0, 1\n\t"
162 "fmov d11, x0\n\t"
163 "add x0, x0, 1\n\t"
164 "fmov d12, x0\n\t"
165 "add x0, x0, 1\n\t"
166 "fmov d13, x0\n\t"
167 "add x0, x0, 1\n\t"
168 "fmov d14, x0\n\t"
169 "add x0, x0, 1\n\t"
170 "fmov d15, x0\n\t"
171
172 // Load call params
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700173 "ldr x0, [sp]\n\t"
174 "ldr x1, [sp, #8]\n\t"
175 "ldr x2, [sp, #16]\n\t"
176 "ldr x3, [sp, #24]\n\t"
177 "ldr x18, [sp, #32]\n\t"
178 "add sp, sp, #48\n\t"
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700179 ".cfi_adjust_cfa_offset -48\n\t"
180
Andreas Gampe6cf80102014-05-19 11:32:41 -0700181
182 "blr x3\n\t" // Call the stub
183
184 // Test d8 - d15. We can use x1 and x2.
185 "movk x1, #0xfad0\n\t"
186 "movk x1, #0xebad, lsl #16\n\t"
187 "movk x1, #0xfad0, lsl #32\n\t"
188 "movk x1, #0xebad, lsl #48\n\t"
189 "fmov x2, d8\n\t"
190 "cmp x1, x2\n\t"
191 "b.ne 1f\n\t"
192 "add x1, x1, 1\n\t"
193
194 "fmov x2, d9\n\t"
195 "cmp x1, x2\n\t"
196 "b.ne 1f\n\t"
197 "add x1, x1, 1\n\t"
198
199 "fmov x2, d10\n\t"
200 "cmp x1, x2\n\t"
201 "b.ne 1f\n\t"
202 "add x1, x1, 1\n\t"
203
204 "fmov x2, d11\n\t"
205 "cmp x1, x2\n\t"
206 "b.ne 1f\n\t"
207 "add x1, x1, 1\n\t"
208
209 "fmov x2, d12\n\t"
210 "cmp x1, x2\n\t"
211 "b.ne 1f\n\t"
212 "add x1, x1, 1\n\t"
213
214 "fmov x2, d13\n\t"
215 "cmp x1, x2\n\t"
216 "b.ne 1f\n\t"
217 "add x1, x1, 1\n\t"
218
219 "fmov x2, d14\n\t"
220 "cmp x1, x2\n\t"
221 "b.ne 1f\n\t"
222 "add x1, x1, 1\n\t"
223
224 "fmov x2, d15\n\t"
225 "cmp x1, x2\n\t"
226 "b.ne 1f\n\t"
227
228 "mov %[fpr_result], #0\n\t"
229
230 // Finish up.
231 "2:\n\t"
232 "ldp x1, x2, [sp, #8]\n\t" // Restore x1, x2
233 "ldp x3, x18, [sp, #24]\n\t" // Restore x3, xSELF
234 "ldr x30, [sp, #40]\n\t" // Restore xLR
235 "add sp, sp, #48\n\t" // Free stack space
236 ".cfi_adjust_cfa_offset -48\n\t"
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700237 "mov %[result], x0\n\t" // Save the result
Andreas Gampe6cf80102014-05-19 11:32:41 -0700238
239 "ldp d8, d9, [sp]\n\t" // Restore d8 - d15
240 "ldp d10, d11, [sp, #16]\n\t"
241 "ldp d12, d13, [sp, #32]\n\t"
242 "ldp d14, d15, [sp, #48]\n\t"
243 "add sp, sp, #64\n\t"
244 ".cfi_adjust_cfa_offset -64\n\t"
245
246 "b 3f\n\t" // Goto end
247
248 // Failed fpr verification.
249 "1:\n\t"
250 "mov %[fpr_result], #1\n\t"
251 "b 2b\n\t" // Goto finish-up
252
253 // End
254 "3:\n\t"
255 : [result] "=r" (result), [fpr_result] "=r" (fpr_result)
          // Use the result from x0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : "x4", "x5", "x6", "x7", "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17");  // clobber.
#elif defined(__x86_64__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"              // Call the stub
268 "addq $16, %%rsp\n\t" // Pop nullptr and padding
269 ".cfi_adjust_cfa_offset -16\n\t"
270 : "=a" (result)
271 // Use the result from rax
272 : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "m"(referrer)
273 // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
274 : "rbx", "rcx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15"); // clobber all
275 // TODO: Should we clobber the other registers?
276#else
277 LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
278 result = 0;
279#endif
280 // Pop transition.
281 self->PopManagedStackFragment(fragment);
Andreas Gampe6cf80102014-05-19 11:32:41 -0700282
283 fp_result = fpr_result;
284 EXPECT_EQ(0U, fp_result);
285
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700286 return result;
287 }
288
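  // Illustrative use (this mirrors the tests below): invoke a stub on an
  // object with no referrer method, e.g.
  //   Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
  //           reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);
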
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, mirror::ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "movd %[hidden], %%xmm0\n\t"
        "pushl %[referrer]\n\t"     // Store referrer
        "call *%%edi\n\t"           // Call the stub
        "addl $4, %%esp"            // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"m"(referrer), [hidden]"r"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : );  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\t"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                // Call the stub
340 "add sp, sp, #12\n\t" // Pop nullptr and padding
341 ".cfi_adjust_cfa_offset -12\n\t"
342 "pop {r1-r12, lr}\n\t" // Restore state
343 ".cfi_adjust_cfa_offset -52\n\t"
344 "mov %[result], r0\n\t" // Save the result
345 : [result] "=r" (result)
346 // Use the result from r0
347 : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
348 [referrer] "r"(referrer), [hidden] "r"(hidden)
349 : ); // clobber.
350#elif defined(__aarch64__)
351 __asm__ __volatile__(
352 // Spill space for d8 - d15
353 "sub sp, sp, #64\n\t"
354 ".cfi_adjust_cfa_offset 64\n\t"
355 "stp d8, d9, [sp]\n\t"
356 "stp d10, d11, [sp, #16]\n\t"
357 "stp d12, d13, [sp, #32]\n\t"
358 "stp d14, d15, [sp, #48]\n\t"
359
360 "sub sp, sp, #48\n\t" // Reserve stack space, 16B aligned
361 ".cfi_adjust_cfa_offset 48\n\t"
362 "stp %[referrer], x1, [sp]\n\t"// referrer, x1
363 "stp x2, x3, [sp, #16]\n\t" // Save x2, x3
364 "stp x18, x30, [sp, #32]\n\t" // Save x18(xSELF), xLR
365
366 // Push everything on the stack, so we don't rely on the order. What a mess. :-(
367 "sub sp, sp, #48\n\t"
368 ".cfi_adjust_cfa_offset 48\n\t"
369 "str %[arg0], [sp]\n\t"
370 "str %[arg1], [sp, #8]\n\t"
371 "str %[arg2], [sp, #16]\n\t"
372 "str %[code], [sp, #24]\n\t"
373 "str %[self], [sp, #32]\n\t"
374 "str %[hidden], [sp, #40]\n\t"
375
376 // Now we definitely have x0-x3 free, use it to garble d8 - d15
377 "movk x0, #0xfad0\n\t"
378 "movk x0, #0xebad, lsl #16\n\t"
379 "movk x0, #0xfad0, lsl #32\n\t"
380 "movk x0, #0xebad, lsl #48\n\t"
381 "fmov d8, x0\n\t"
382 "add x0, x0, 1\n\t"
383 "fmov d9, x0\n\t"
384 "add x0, x0, 1\n\t"
385 "fmov d10, x0\n\t"
386 "add x0, x0, 1\n\t"
387 "fmov d11, x0\n\t"
388 "add x0, x0, 1\n\t"
389 "fmov d12, x0\n\t"
390 "add x0, x0, 1\n\t"
391 "fmov d13, x0\n\t"
392 "add x0, x0, 1\n\t"
393 "fmov d14, x0\n\t"
394 "add x0, x0, 1\n\t"
395 "fmov d15, x0\n\t"
396
397 // Load call params
398 "ldr x0, [sp]\n\t"
399 "ldr x1, [sp, #8]\n\t"
400 "ldr x2, [sp, #16]\n\t"
401 "ldr x3, [sp, #24]\n\t"
402 "ldr x18, [sp, #32]\n\t"
403 "ldr x12, [sp, #40]\n\t"
404 "add sp, sp, #48\n\t"
405 ".cfi_adjust_cfa_offset -48\n\t"
406
407
408 "blr x3\n\t" // Call the stub
409
410 // Test d8 - d15. We can use x1 and x2.
411 "movk x1, #0xfad0\n\t"
412 "movk x1, #0xebad, lsl #16\n\t"
413 "movk x1, #0xfad0, lsl #32\n\t"
414 "movk x1, #0xebad, lsl #48\n\t"
415 "fmov x2, d8\n\t"
416 "cmp x1, x2\n\t"
417 "b.ne 1f\n\t"
418 "add x1, x1, 1\n\t"
419
420 "fmov x2, d9\n\t"
421 "cmp x1, x2\n\t"
422 "b.ne 1f\n\t"
423 "add x1, x1, 1\n\t"
424
425 "fmov x2, d10\n\t"
426 "cmp x1, x2\n\t"
427 "b.ne 1f\n\t"
428 "add x1, x1, 1\n\t"
429
430 "fmov x2, d11\n\t"
431 "cmp x1, x2\n\t"
432 "b.ne 1f\n\t"
433 "add x1, x1, 1\n\t"
434
435 "fmov x2, d12\n\t"
436 "cmp x1, x2\n\t"
437 "b.ne 1f\n\t"
438 "add x1, x1, 1\n\t"
439
440 "fmov x2, d13\n\t"
441 "cmp x1, x2\n\t"
442 "b.ne 1f\n\t"
443 "add x1, x1, 1\n\t"
444
445 "fmov x2, d14\n\t"
446 "cmp x1, x2\n\t"
447 "b.ne 1f\n\t"
448 "add x1, x1, 1\n\t"
449
450 "fmov x2, d15\n\t"
451 "cmp x1, x2\n\t"
452 "b.ne 1f\n\t"
453
454 "mov %[fpr_result], #0\n\t"
455
456 // Finish up.
457 "2:\n\t"
458 "ldp x1, x2, [sp, #8]\n\t" // Restore x1, x2
459 "ldp x3, x18, [sp, #24]\n\t" // Restore x3, xSELF
460 "ldr x30, [sp, #40]\n\t" // Restore xLR
461 "add sp, sp, #48\n\t" // Free stack space
462 ".cfi_adjust_cfa_offset -48\n\t"
463 "mov %[result], x0\n\t" // Save the result
464
465 "ldp d8, d9, [sp]\n\t" // Restore d8 - d15
466 "ldp d10, d11, [sp, #16]\n\t"
467 "ldp d12, d13, [sp, #32]\n\t"
468 "ldp d14, d15, [sp, #48]\n\t"
469 "add sp, sp, #64\n\t"
470 ".cfi_adjust_cfa_offset -64\n\t"
471
472 "b 3f\n\t" // Goto end
473
474 // Failed fpr verification.
475 "1:\n\t"
476 "mov %[fpr_result], #1\n\t"
477 "b 2b\n\t" // Goto finish-up
478
479 // End
480 "3:\n\t"
481 : [result] "=r" (result), [fpr_result] "=r" (fpr_result)
          // Use the result from x0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "x4", "x5", "x6", "x7", "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17");  // clobber.
#elif defined(__x86_64__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "movq %[hidden], %%r9\n\t"     // No need to save r9, listed as clobbered
        "movd %%r9, %%xmm0\n\t"
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"              // Call the stub
496 "addq $16, %%rsp\n\t" // Pop nullptr and padding
497 ".cfi_adjust_cfa_offset -16\n\t"
498 : "=a" (result)
499 // Use the result from rax
500 : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "m"(referrer), [hidden] "m"(hidden)
501 // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
502 : "rbx", "rcx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15"); // clobber all
503 // TODO: Should we clobber the other registers?
504#else
505 LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
506 result = 0;
507#endif
508 // Pop transition.
509 self->PopManagedStackFragment(fragment);
510
511 fp_result = fpr_result;
512 EXPECT_EQ(0U, fp_result);
513
514 return result;
515 }
516
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700517 // Method with 32b arg0, 64b arg1
518 size_t Invoke3UWithReferrer(size_t arg0, uint64_t arg1, uintptr_t code, Thread* self,
519 mirror::ArtMethod* referrer) {
520#if defined(__x86_64__) || defined(__aarch64__)
521 // Just pass through.
522 return Invoke3WithReferrer(arg0, arg1, 0U, code, self, referrer);
523#else
524 // Need to split up arguments.
525 uint32_t lower = static_cast<uint32_t>(arg1 & 0xFFFFFFFF);
526 uint32_t upper = static_cast<uint32_t>((arg1 >> 32) & 0xFFFFFFFF);
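    // (Worked example: arg1 == 0x1122334455667788 yields lower == 0x55667788
    //  and upper == 0x11223344, passed in the second and third argument
    //  slots respectively.)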

    return Invoke3WithReferrer(arg0, lower, upper, code, self, referrer);
#endif
  }

  // Method with 32b arg0, 32b arg1, 64b arg2
  size_t Invoke3UUWithReferrer(uint32_t arg0, uint32_t arg1, uint64_t arg2, uintptr_t code,
                               Thread* self, mirror::ArtMethod* referrer) {
#if defined(__x86_64__) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, referrer);
#else
    // TODO: Needs 4-param invoke.
    return 0;
#endif
  }

 protected:
  size_t fp_result;
};


#if defined(__i386__) || defined(__x86_64__)
extern "C" void art_quick_memcpy(void);
#endif

TEST_F(StubTest, Memcpy) {
#if defined(__i386__) || defined(__x86_64__)
  Thread* self = Thread::Current();

  uint32_t orig[20];
  uint32_t trg[20];
  for (size_t i = 0; i < 20; ++i) {
    orig[i] = i;
    trg[i] = 0;
  }

  Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
          10 * sizeof(uint32_t), reinterpret_cast<uintptr_t>(&art_quick_memcpy), self);

  EXPECT_EQ(orig[0], trg[0]);

  for (size_t i = 1; i < 4; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  for (size_t i = 4; i < 14; ++i) {
    EXPECT_EQ(orig[i], trg[i]);
  }

  for (size_t i = 14; i < 20; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  // TODO: Test overlapping?

#else
  LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_lock_object(void);
#endif

TEST_F(StubTest, LockObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
          reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
  EXPECT_EQ(lock_after.ThinLockCount(), 0U);  // Thin lock starts count at zero

  for (size_t i = 1; i < kThinLockLoops; ++i) {
    Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
            reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

    // Check we're at lock count i

    LockWord l_inc = obj->GetLockWord(false);
    LockWord::LockState l_inc_state = l_inc.GetState();
    EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
    EXPECT_EQ(l_inc.ThinLockCount(), i);
  }
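  // After the loop the object has been locked kThinLockLoops times in total;
  // the recorded thin lock count is kThinLockLoops - 1, since a count of zero
  // denotes a single lock holder.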

  // Force a fat lock by running identity hashcode to fill up lock word.
  Handle<mirror::String> obj2(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  obj2->IdentityHashCode();

  Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U,
          reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after2 = obj2->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
  EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));

  // Test done.
#else
  LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  uint32_t next() {
    val_ = val_ * 48271 % 2147483647 + 13;
    return val_;
  }

  uint32_t val_;
};
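
// RandGen is a small deterministic pseudo-random generator (a Park-Miller-style
// multiplicative step, 48271 mod 2^31 - 1, with an added offset), so failing
// runs reproduce exactly. Illustrative use, as in the stress test below:
//   RandGen r(0x1234);
//   size_t index = r.next() % kNumberOfLocks;  // repeatable bounded choice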


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_lock_object(void);
extern "C" void art_quick_unlock_object(void);
#endif

// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init
  static constexpr size_t kNumberOfLocks = 10;  // Number of objects, each with its own lock.
  StackHandleScope<kNumberOfLocks + 1> hs(self);
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
  // This should be an illegal monitor state.
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);

  LockWord lock_after2 = obj->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U,
                reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);

  LockWord lock_after3 = obj->GetLockWord(false);
  LockWord::LockState new_state3 = lock_after3.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);

  // Stress test:
  // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
  // each step.

  RandGen r(0x1234);

  constexpr size_t kIterations = 10000;  // Number of iterations
  constexpr size_t kMoveToFat = 1000;    // Chance of 1:kMoveToFat to make a lock fat.

  size_t counts[kNumberOfLocks];
  bool fat[kNumberOfLocks];  // Whether a lock should be thin or fat.
  Handle<mirror::String> objects[kNumberOfLocks];

  // Initialize = allocate.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    counts[i] = 0;
    fat[i] = false;
    objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
  }

  for (size_t i = 0; i < kIterations; ++i) {
    // Select which lock to update.
    size_t index = r.next() % kNumberOfLocks;

    // Make lock fat?
    if (!fat[index] && (r.next() % kMoveToFat == 0)) {
      fat[index] = true;
      objects[index]->IdentityHashCode();

      LockWord lock_iter = objects[index]->GetLockWord(false);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (counts[index] == 0) {
        EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
      } else {
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
      }
    } else {
      bool lock;  // Whether to lock or unlock in this step.
      if (counts[index] == 0) {
        lock = true;
      } else if (counts[index] == kThinLockLoops) {
        lock = false;
      } else {
        // Randomly.
        lock = r.next() % 2 == 0;
      }

      if (lock) {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      reinterpret_cast<uintptr_t>(&art_quick_lock_object), self);
        counts[index]++;
      } else {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
        counts[index]--;
      }

      EXPECT_FALSE(self->IsExceptionPending());

      // Check the new state.
      LockWord lock_iter = objects[index]->GetLockWord(true);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (fat[index]) {
        // Abuse MonitorInfo.
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
        MonitorInfo info(objects[index].Get());
        EXPECT_EQ(counts[index], info.entry_count_) << index;
      } else {
        if (counts[index] > 0) {
          EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
          EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
        } else {
          EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
        }
      }
    }
  }

  // Unlock the remaining count times and then check it's unlocked. Then deallocate.
  // Go reverse order to correctly handle Handles.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    size_t index = kNumberOfLocks - 1 - i;
    size_t count = counts[index];
    while (count > 0) {
      test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                    reinterpret_cast<uintptr_t>(&art_quick_unlock_object), self);
      count--;
    }

    LockWord lock_after4 = objects[index]->GetLockWord(false);
    LockWord::LockState new_state4 = lock_after4.GetState();
    EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
                || LockWord::LockState::kFatLocked == new_state4);
  }

  // Test done.
#else
  LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, UnlockObject) {
  TestUnlockObject(this);
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_check_cast(void);
#endif

TEST_F(StubTest, CheckCast) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  Thread* self = Thread::Current();
  // Find some classes.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
  Handle<mirror::Class> c2(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_FALSE(self->IsExceptionPending());

  // TODO: Make the following work. But that would require correct managed frames.

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          reinterpret_cast<uintptr_t>(&art_quick_check_cast), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

#else
  LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_aput_obj_with_null_and_bound_check(void);
// Do not check non-checked ones, we'd need handlers and stuff...
#endif

TEST_F(StubTest, APutObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<5> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
  Handle<mirror::Class> ca(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  // Build a string array of size 10
  Handle<mirror::ObjectArray<mirror::Object>> array(
      hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));

  // Build a string -> should be assignable
  Handle<mirror::String> str_obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  // Build a generic object -> should fail assigning
  Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));

  // Play with it...

  // 1) Success cases
  // 1.1) Assign str_obj to array[0..3]

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(3));

  // 1.2) Assign null to array[0..3]

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(3));

  // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.

  // 2) Failure cases (str into str[])
  // 2.1) Array = null
  // TODO: Throwing NPE needs actual DEX code

//  Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
//          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
//
//  EXPECT_TRUE(self->IsExceptionPending());
//  self->ClearException();

  // 2.2) Index < 0

  Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
          reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 2.3) Index >= array length

  Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 3) Failure cases (obj into str[])

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // Tests done.
#else
  LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, AllocObject) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            reinterpret_cast<size_t>(c->GetVirtualMethod(0)),  // arbitrary
                            0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObject),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K
    size_t length = 128 * KB / 4;
    while (length > 10) {
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h.Get() == nullptr) {
        self->ClearException();

        // Try a smaller length
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h.Get() != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, AllocObjectArray) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(self);
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

  // Needed to have a linked method.
  Handle<mirror::Class> c_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());

  // For some reason this does not work, as the type_idx is artificial and outside what the
  // resolved types of c_obj allow...

  if (false) {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0)),  // arbitrary
                            10U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArray),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    EXPECT_EQ(obj->GetLength(), 10);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 10U,
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArrayResolved),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory.
  {
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr),
                            GB,  // that should fail...
                            reinterpret_cast<uintptr_t>(GetTlsPtr(self)->quick_entrypoints.pAllocArrayResolved),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_string_compareto(void);
#endif

TEST_F(StubTest, StringCompareTo) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  static constexpr size_t kBaseStringCount = 7;
  const char* c[kBaseStringCount] = { "", "", "a", "aa", "ab", "aac", "aac", };

  static constexpr size_t kStringCount = 2 * kBaseStringCount;

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kBaseStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
  }

  RandGen r(0x1234);

  for (size_t i = kBaseStringCount; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i - kBaseStringCount]));
    int32_t length = s[i]->GetLength();
    if (length > 1) {
      // Set a random offset and length.
      int32_t new_offset = 1 + (r.next() % (length - 1));
      int32_t rest = length - new_offset - 1;
      int32_t new_length = 1 + (rest > 0 ? r.next() % rest : 0);

      s[i]->SetField32<false>(mirror::String::CountOffset(), new_length);
      s[i]->SetField32<false>(mirror::String::OffsetOffset(), new_offset);
    }
  }
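  // (Example: if the base string is "aac" (length 3), new_offset may be 1 or 2
  //  and new_length is at least 1, so the copy now views a substring such as
  //  "ac" or "c" while keeping its original backing char array.)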
1251
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001252 // TODO: wide characters
1253
1254 // Matrix of expectations. First component is first parameter. Note we only check against the
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001255 // sign, not the value. As we are testing random offsets, we need to compute this and need to
1256 // rely on String::CompareTo being correct.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001257 int32_t expected[kStringCount][kStringCount];
1258 for (size_t x = 0; x < kStringCount; ++x) {
1259 for (size_t y = 0; y < kStringCount; ++y) {
1260 expected[x][y] = s[x]->CompareTo(s[y].Get());
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001261 }
1262 }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      // Test string_compareto x y
      size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
                              reinterpret_cast<size_t>(s[y].Get()), 0U,
                              reinterpret_cast<uintptr_t>(&art_quick_string_compareto), self);

      EXPECT_FALSE(self->IsExceptionPending());

      // The result is a 32b signed integer
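      // returned in a register that Invoke3 hands back as a size_t. The ISAs
      // exercised here are all little-endian, so reading conv.i below recovers
      // exactly the low 32 bits, i.e. the stub's actual return value.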
      union {
        size_t r;
        int32_t i;
      } conv;
      conv.r = result;
      int32_t e = expected[x][y];
      EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
      std::endl;
#endif
}
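
// The three EXPECT_TRUEs above spell out a sign-only comparison. A hypothetical
// sign-normalizing helper (a sketch only, not part of this test) would state
// the same invariant in one line:
//
//   static int Sign(int32_t v) { return (v > 0) - (v < 0); }
//   ...
//   EXPECT_EQ(Sign(e), Sign(conv.i));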


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_set32_static(void);
extern "C" void art_quick_get32_static(void);
#endif
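
// As exercised by the helpers below, the static-field stubs take the field's
// dex index (resolved against the referrer method) as the first argument; the
// set stubs additionally take the new value, and the get stubs return the
// current value in the native return register.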

static void GetSet32Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                           mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  constexpr size_t num_values = 7;
  uint32_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              reinterpret_cast<uintptr_t>(&art_quick_set32_static),
                              self,
                              referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           reinterpret_cast<uintptr_t>(&art_quick_get32_static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_set32_instance(void);
extern "C" void art_quick_get32_instance(void);
#endif

static void GetSet32Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  constexpr size_t num_values = 7;
  uint32_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              reinterpret_cast<uintptr_t>(&art_quick_set32_instance),
                              self,
                              referrer);

    int32_t res = f->Get()->GetInt(obj->Get());
    EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;

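    // Cross-check the opposite direction as well: bump the value through the
    // reflective setter, then read it back through the get stub.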
    res++;
    f->Get()->SetInt<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            reinterpret_cast<uintptr_t>(&art_quick_get32_instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int32_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_set_obj_static(void);
extern "C" void art_quick_get_obj_static(void);

static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
                                 mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                            reinterpret_cast<size_t>(val),
                            0U,
                            reinterpret_cast<uintptr_t>(&art_quick_set_obj_static),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                                         0U, 0U,
                                         reinterpret_cast<uintptr_t>(&art_quick_get_obj_static),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
}
#endif

static void GetSetObjStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                            mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_static((*f)->GetDexFieldIndex(), str, self, referrer, test);

  set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);
#else
  LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_set_obj_instance(void);
extern "C" void art_quick_get_obj_instance(void);

static void set_and_check_instance(Handle<mirror::ArtField>* f, mirror::Object* trg,
                                   mirror::Object* val, Thread* self, mirror::ArtMethod* referrer,
                                   StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                            reinterpret_cast<size_t>(trg),
                            reinterpret_cast<size_t>(val),
                            reinterpret_cast<uintptr_t>(&art_quick_set_obj_instance),
                            self,
                            referrer);

  size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                         reinterpret_cast<size_t>(trg),
                                         0U,
                                         reinterpret_cast<uintptr_t>(&art_quick_get_obj_instance),
                                         self,
                                         referrer);

  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;

  EXPECT_EQ(val, f->Get()->GetObj(trg));
}
#endif

static void GetSetObjInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                              Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_instance(f, obj->Get(), str, self, referrer, test);

  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
#else
  LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


// TODO: Complete these tests for 32b architectures.

#if defined(__x86_64__) || defined(__aarch64__)
extern "C" void art_quick_set64_static(void);
extern "C" void art_quick_get64_static(void);
#endif

static void GetSet64Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
                           mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__x86_64__) || defined(__aarch64__)
  constexpr size_t num_values = 8;
  uint64_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
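  // Note: the last value does not fit in 32 bits, so a stub that accidentally
  // took a 32-bit path would truncate it and fail the check below.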

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3UWithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                               values[i],
                               reinterpret_cast<uintptr_t>(&art_quick_set64_static),
                               self,
                               referrer);

    size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                           0U, 0U,
                                           reinterpret_cast<uintptr_t>(&art_quick_get64_static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


#if defined(__x86_64__) || defined(__aarch64__)
extern "C" void art_quick_set64_instance(void);
extern "C" void art_quick_get64_instance(void);
#endif

static void GetSet64Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
                             Thread* self, mirror::ArtMethod* referrer, StubTest* test)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__x86_64__) || defined(__aarch64__)
  constexpr size_t num_values = 8;
  uint64_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < num_values; ++i) {
    test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              reinterpret_cast<uintptr_t>(&art_quick_set64_instance),
                              self,
                              referrer);

    int64_t res = f->Get()->GetLong(obj->Get());
    EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;

    res++;
    f->Get()->SetLong<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            reinterpret_cast<uintptr_t>(&art_quick_get64_instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int64_t>(res2));
  }
#else
  LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
  // garbage is created during ClassLinker::Init

  JNIEnv* env = Thread::Current()->GetJniEnv();
  jclass jc = env->FindClass("AllFields");
  CHECK(jc != nullptr);
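  // Note: JNI AllocObject creates an instance without running any constructor,
  // which is fine here because the test only reads and writes its fields.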
  jobject o = env->AllocObject(jc);
  CHECK(o != nullptr);

  ScopedObjectAccess soa(self);
  StackHandleScope<5> hs(self);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
  Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
  // Need a method as a referrer
  Handle<mirror::ArtMethod> m(hs.NewHandle(c->GetDirectMethod(0)));

  // Play with it...

  // Static fields.
  {
    Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetSFields()));
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      StackHandleScope<1> hs(self);
      Handle<mirror::ArtField> f(hs.NewHandle(fields->Get(i)));

      FieldHelper fh(f.Get());
      Primitive::Type type = fh.GetTypeAsPrimitiveType();
      switch (type) {
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Static(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Static(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Don't try array.
          if (test_type == type && fh.GetTypeDescriptor()[0] != '[') {
            GetSetObjStatic(&obj, &f, self, m.Get(), test);
          }
          break;

        default:
          break;  // Skip.
      }
    }
  }

  // Instance fields.
  {
    Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetIFields()));
    int32_t num_fields = fields->GetLength();
    for (int32_t i = 0; i < num_fields; ++i) {
      StackHandleScope<1> hs(self);
      Handle<mirror::ArtField> f(hs.NewHandle(fields->Get(i)));

      FieldHelper fh(f.Get());
      Primitive::Type type = fh.GetTypeAsPrimitiveType();
      switch (type) {
        case Primitive::Type::kPrimInt:
          if (test_type == type) {
            GetSet32Instance(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimLong:
          if (test_type == type) {
            GetSet64Instance(&obj, &f, self, m.Get(), test);
          }
          break;

        case Primitive::Type::kPrimNot:
          // Don't try array.
          if (test_type == type && fh.GetTypeDescriptor()[0] != '[') {
            GetSetObjInstance(&obj, &f, self, m.Get(), test);
          }
          break;

        default:
          break;  // Skip.
      }
    }
  }

  // TODO: Deallocate things.
}


TEST_F(StubTest, Fields32) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimInt);
}

TEST_F(StubTest, FieldsObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimNot);
}

TEST_F(StubTest, Fields64) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  self->TransitionFromSuspendedToRunnable();
  LoadDex("AllFields");
  bool started = runtime_->Start();
  CHECK(started);

  TestFields(self, this, Primitive::Type::kPrimLong);
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
extern "C" void art_quick_imt_conflict_trampoline(void);
#endif

TEST_F(StubTest, IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__x86_64__)
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod = env->GetMethodID(arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get mirror representation.
  Handle<mirror::ArtMethod> contains_amethod(hs.NewHandle(soa.DecodeMethod(contains_jmethod)));

  // Patch up ArrayList.contains.
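  // This test runs under -Xint, so contains may have no compiled code; point
  // its entry point at the quick-to-interpreter bridge so that whatever the
  // conflict trampoline dispatches to is actually executable.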
  if (contains_amethod.Get()->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod.Get()->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        GetTlsPtr(self)->quick_entrypoints.pQuickToInterpreterBridge));
  }

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod = env->GetMethodID(list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get mirror representation.
  Handle<mirror::ArtMethod> inf_contains(hs.NewHandle(soa.DecodeMethod(inf_contains_jmethod)));

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Sanity check: check that there is a conflict for List.contains in ArrayList.

  mirror::Class* arraylist_class = soa.Decode<mirror::Class*>(arraylist_jclass);
  mirror::ArtMethod* m = arraylist_class->GetImTable()->Get(
      inf_contains->GetDexMethodIndex() % ClassLinker::kImtSize);
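
  // Interface methods hash into the IMT by dex method index modulo the fixed
  // kImtSize. When two interface methods land on the same slot, the slot holds
  // the conflict trampoline instead of a concrete method, which is exactly the
  // situation this test depends on.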

  if (!m->IsImtConflictMethod()) {
    LOG(WARNING) << "Test is meaningless, no IMT conflict in setup: " <<
        PrettyMethod(m, true);
    LOG(WARNING) << "Please update StubTest.IMT.";
    return;
  }

  // Create instances.

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object*>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(jobj)));

  // Invoke.

  size_t result =
      Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   reinterpret_cast<uintptr_t>(&art_quick_imt_conflict_trampoline),
                                   self, contains_amethod.Get(),
                                   static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));

  // Invoke again.

  result = Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                        reinterpret_cast<size_t>(obj.Get()),
                                        reinterpret_cast<uintptr_t>(&art_quick_imt_conflict_trampoline),
                                        self, contains_amethod.Get(),
                                        static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

}  // namespace art