/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <cstdio>

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "class_linker-inl.h"
#include "common_runtime_test.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "mirror/class-inl.h"
#include "mirror/string-inl.h"
#include "scoped_thread_state_change.h"

namespace art {


class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(), type);
        }
      }
    }
  }

  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             ArtMethod* referrer) {
    return Invoke3WithReferrerAndHidden(arg0, arg1, arg2, code, self, referrer, 0);
  }

  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
#define PUSH(reg) "push " # reg "\n\t .cfi_adjust_cfa_offset 4\n\t"
#define POP(reg) "pop " # reg "\n\t .cfi_adjust_cfa_offset -4\n\t"
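    // Note on the PUSH/POP macros above: each push/pop is paired with a .cfi_adjust_cfa_offset
    // directive so the unwinder's notion of the CFA stays in sync with these hand-written spills.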
    __asm__ __volatile__(
        "movd %[hidden], %%xmm7\n\t"  // This is a memory op, so do this early. If it is off of
                                      // esp, then we won't be able to access it after spilling.

        // Spill 6 registers.
        PUSH(%%ebx)
        PUSH(%%ecx)
        PUSH(%%edx)
        PUSH(%%esi)
        PUSH(%%edi)
        PUSH(%%ebp)

        // Store the inputs to the stack, but keep the referrer up top, less work.
        PUSH(%[referrer])  // Align stack.
        PUSH(%[referrer])  // Store referrer

        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[code])
        // Now read them back into the required registers.
        POP(%%edi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%eax)
        // Call is prepared now.

        "call *%%edi\n\t"     // Call the stub
        "addl $8, %%esp\n\t"  // Pop referrer and padding.
        ".cfi_adjust_cfa_offset -8\n\t"

        // Restore 6 registers.
        POP(%%ebp)
        POP(%%edi)
        POP(%%esi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%ebx)

        : "=a" (result)
          // Use the result from eax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer]"r"(referrer), [hidden]"m"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory", "xmm7");  // clobber.
#undef PUSH
#undef POP
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"   // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"  // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"              // Call the stub
        "add sp, sp, #12\n\t"     // Pop null and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"    // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"   // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "r0", "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"    // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x19, x17, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"              // Call the stub
        "mov x8, x0\n\t"          // Store result
        "add sp, sp, #16\n\t"     // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"          // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"    // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"     // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t"  // Store the FPR comparison result
        "mov %[result], x8\n\t"      // Store the call result

        "b 3f\n\t"                // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__mips__) && !defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a3 and t0-t7 which we say we don't clobber. May contain args.
        "addiu $sp, $sp, -64\n\t"
        "sw $a0, 0($sp)\n\t"
        "sw $a1, 4($sp)\n\t"
        "sw $a2, 8($sp)\n\t"
        "sw $a3, 12($sp)\n\t"
        "sw $t0, 16($sp)\n\t"
        "sw $t1, 20($sp)\n\t"
        "sw $t2, 24($sp)\n\t"
        "sw $t3, 28($sp)\n\t"
        "sw $t4, 32($sp)\n\t"
        "sw $t5, 36($sp)\n\t"
        "sw $t6, 40($sp)\n\t"
        "sw $t7, 44($sp)\n\t"
        // Spill gp register since it is caller save.
        "sw $gp, 52($sp)\n\t"

        "addiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sw %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "addiu $sp, $sp, -24\n\t"
        "sw %[arg0], 0($sp)\n\t"
        "sw %[arg1], 4($sp)\n\t"
        "sw %[arg2], 8($sp)\n\t"
        "sw %[code], 12($sp)\n\t"
        "sw %[self], 16($sp)\n\t"
        "sw %[hidden], 20($sp)\n\t"

        // Load call params into the right registers.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $t9, 12($sp)\n\t"
        "lw $s1, 16($sp)\n\t"
        "lw $t0, 20($sp)\n\t"
        "addiu $sp, $sp, 24\n\t"

        "jalr $t9\n\t"             // Call the stub.
        "nop\n\t"
        "addiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $a3, 12($sp)\n\t"
        "lw $t0, 16($sp)\n\t"
        "lw $t1, 20($sp)\n\t"
        "lw $t2, 24($sp)\n\t"
        "lw $t3, 28($sp)\n\t"
        "lw $t4, 32($sp)\n\t"
        "lw $t5, 36($sp)\n\t"
        "lw $t6, 40($sp)\n\t"
        "lw $t7, 44($sp)\n\t"
        // Restore gp.
        "lw $gp, 52($sp)\n\t"
        "addiu $sp, $sp, 64\n\t"   // Free stack space, now sp as on entry.

        "move %[result], $v0\n\t"  // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "at", "v0", "v1", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "t8", "t9", "k0", "k1",
          "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__mips__) && defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a7 and t0-t3 which we say we don't clobber. May contain args.
        "daddiu $sp, $sp, -96\n\t"
        "sd $a0, 0($sp)\n\t"
        "sd $a1, 8($sp)\n\t"
        "sd $a2, 16($sp)\n\t"
        "sd $a3, 24($sp)\n\t"
        "sd $a4, 32($sp)\n\t"
        "sd $a5, 40($sp)\n\t"
        "sd $a6, 48($sp)\n\t"
        "sd $a7, 56($sp)\n\t"
        "sd $t0, 64($sp)\n\t"
        "sd $t1, 72($sp)\n\t"
        "sd $t2, 80($sp)\n\t"
        "sd $t3, 88($sp)\n\t"

        "daddiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sd %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "daddiu $sp, $sp, -48\n\t"
        "sd %[arg0], 0($sp)\n\t"
        "sd %[arg1], 8($sp)\n\t"
        "sd %[arg2], 16($sp)\n\t"
        "sd %[code], 24($sp)\n\t"
        "sd %[self], 32($sp)\n\t"
        "sd %[hidden], 40($sp)\n\t"

        // Load call params into the right registers.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $t9, 24($sp)\n\t"
        "ld $s1, 32($sp)\n\t"
        "ld $t0, 40($sp)\n\t"
        "daddiu $sp, $sp, 48\n\t"

        "jalr $t9\n\t"              // Call the stub.
        "nop\n\t"
        "daddiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $a3, 24($sp)\n\t"
        "ld $a4, 32($sp)\n\t"
        "ld $a5, 40($sp)\n\t"
        "ld $a6, 48($sp)\n\t"
        "ld $a7, 56($sp)\n\t"
        "ld $t0, 64($sp)\n\t"
        "ld $t1, 72($sp)\n\t"
        "ld $t2, 80($sp)\n\t"
        "ld $t3, 88($sp)\n\t"
        "daddiu $sp, $sp, 96\n\t"

        "move %[result], $v0\n\t"   // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "at", "v0", "v1", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
          "t8", "t9", "k0", "k1", "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__)
#define PUSH(reg) "pushq " # reg "\n\t .cfi_adjust_cfa_offset 8\n\t"
#define POP(reg) "popq " # reg "\n\t .cfi_adjust_cfa_offset -8\n\t"
    // Note: Uses the native convention. We do a callee-save regimen by manually spilling and
    // restoring almost all registers.
    // TODO: Set the thread?
    __asm__ __volatile__(
        // Spill almost everything (except rax, rsp). 14 registers.
        PUSH(%%rbx)
        PUSH(%%rcx)
        PUSH(%%rdx)
        PUSH(%%rsi)
        PUSH(%%rdi)
        PUSH(%%rbp)
        PUSH(%%r8)
        PUSH(%%r9)
        PUSH(%%r10)
        PUSH(%%r11)
        PUSH(%%r12)
        PUSH(%%r13)
        PUSH(%%r14)
        PUSH(%%r15)

        PUSH(%[referrer])  // Push referrer & 16B alignment padding
        PUSH(%[referrer])

        // Now juggle the input registers.
        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[hidden])
        PUSH(%[code])
        POP(%%r8)
        POP(%%rax)
        POP(%%rdx)
        POP(%%rsi)
        POP(%%rdi)

        "call *%%r8\n\t"       // Call the stub
        "addq $16, %%rsp\n\t"  // Pop null and padding
        ".cfi_adjust_cfa_offset -16\n\t"

        POP(%%r15)
        POP(%%r14)
        POP(%%r13)
        POP(%%r12)
        POP(%%r11)
        POP(%%r10)
        POP(%%r9)
        POP(%%r8)
        POP(%%rbp)
        POP(%%rdi)
        POP(%%rsi)
        POP(%%rdx)
        POP(%%rcx)
        POP(%%rbx)

        : "=a" (result)
          // Use the result from rax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into some other
          // register. We can't use "b" (rbx), as ASAN uses this for the frame pointer.
        : "memory");  // We spill and restore (almost) all registers, so only mention memory here.
#undef PUSH
#undef POP
#else
    UNUSED(arg0, arg1, arg2, code, referrer, hidden);
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
    int32_t offset;
#ifdef __LP64__
    offset = GetThreadOffset<8>(entrypoint).Int32Value();
#else
    offset = GetThreadOffset<4>(entrypoint).Int32Value();
#endif
    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
  }

 protected:
  size_t fp_result;
};
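// The tests below all follow the same pattern: look up a quick entrypoint from the current
// Thread and drive it through Invoke3 with raw size_t arguments. A minimal sketch of that
// pattern (illustrative only, loosely mirroring the Memcpy test further down; the buffers and
// sizes here are made-up values, not part of any actual test):
//
//   Thread* self = Thread::Current();
//   uintptr_t stub = StubTest::GetEntrypoint(self, kQuickMemcpy);
//   uint32_t src[4] = {1, 2, 3, 4};
//   uint32_t dst[4] = {};
//   Invoke3(reinterpret_cast<size_t>(dst), reinterpret_cast<size_t>(src), sizeof(src),
//           stub, self);  // On supported architectures dst now mirrors src.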


TEST_F(StubTest, Memcpy) {
#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__)) || defined(__mips__)
  Thread* self = Thread::Current();

  uint32_t orig[20];
  uint32_t trg[20];
  for (size_t i = 0; i < 20; ++i) {
    orig[i] = i;
    trg[i] = 0;
  }

  Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
          10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);

  EXPECT_EQ(orig[0], trg[0]);

  for (size_t i = 1; i < 4; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  for (size_t i = 4; i < 14; ++i) {
    EXPECT_EQ(orig[i], trg[i]);
  }

  for (size_t i = 14; i < 20; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  // TODO: Test overlapping?

#else
  LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, LockObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();

  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
  EXPECT_EQ(lock_after.ThinLockCount(), 0U);  // Thin lock starts count at zero

  for (size_t i = 1; i < kThinLockLoops; ++i) {
    Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

    // Check we're at lock count i

    LockWord l_inc = obj->GetLockWord(false);
    LockWord::LockState l_inc_state = l_inc.GetState();
    EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
    EXPECT_EQ(l_inc.ThinLockCount(), i);
  }

  // Force a fat lock by running identity hashcode to fill up lock word.
  Handle<mirror::String> obj2(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  obj2->IdentityHashCode();

  Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after2 = obj2->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
  EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));

  // Test done.
#else
  LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  uint32_t next() {
    val_ = val_ * 48271 % 2147483647 + 13;
    return val_;
  }

  uint32_t val_;
};
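// RandGen is a tiny deterministic pseudo-random generator: it uses the MINSTD constants
// (48271, 2^31 - 1) but with uint32_t wraparound and a +13 offset, so it is not the textbook
// generator; determinism across runs, not statistical quality, is what the stress test below
// needs. Illustrative use, mirroring the loop in TestUnlockObject:
//
//   RandGen r(0x1234);
//   size_t index = r.next() % kNumberOfLocks;  // Pick which lock to poke this iteration.
//   bool take_lock = r.next() % 2 == 0;        // And whether to lock or unlock it.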


// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();

  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
  const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init
  static constexpr size_t kNumberOfLocks = 10;  // Number of objects = lock
  StackHandleScope<kNumberOfLocks + 1> hs(self);
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
  // This should be an illegal monitor state.
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after2 = obj->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);

  LockWord lock_after3 = obj->GetLockWord(false);
  LockWord::LockState new_state3 = lock_after3.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);

  // Stress test:
  // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
  // each step.

  RandGen r(0x1234);

  constexpr size_t kIterations = 10000;  // Number of iterations
  constexpr size_t kMoveToFat = 1000;    // Chance of 1:kMoveToFat to make a lock fat.

  size_t counts[kNumberOfLocks];
  bool fat[kNumberOfLocks];  // Whether a lock should be thin or fat.
  Handle<mirror::String> objects[kNumberOfLocks];

  // Initialize = allocate.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    counts[i] = 0;
    fat[i] = false;
    objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
  }

  for (size_t i = 0; i < kIterations; ++i) {
    // Select which lock to update.
    size_t index = r.next() % kNumberOfLocks;

    // Make lock fat?
    if (!fat[index] && (r.next() % kMoveToFat == 0)) {
      fat[index] = true;
      objects[index]->IdentityHashCode();

      LockWord lock_iter = objects[index]->GetLockWord(false);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (counts[index] == 0) {
        EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
      } else {
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
      }
    } else {
      bool take_lock;  // Whether to lock or unlock in this step.
      if (counts[index] == 0) {
        take_lock = true;
      } else if (counts[index] == kThinLockLoops) {
        take_lock = false;
      } else {
        // Randomly.
        take_lock = r.next() % 2 == 0;
      }

      if (take_lock) {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
                      self);
        counts[index]++;
      } else {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      art_quick_unlock_object, self);
        counts[index]--;
      }

      EXPECT_FALSE(self->IsExceptionPending());

      // Check the new state.
      LockWord lock_iter = objects[index]->GetLockWord(true);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (fat[index]) {
        // Abuse MonitorInfo.
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
        MonitorInfo info(objects[index].Get());
        EXPECT_EQ(counts[index], info.entry_count_) << index;
      } else {
        if (counts[index] > 0) {
          EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
          EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
        } else {
          EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
        }
      }
    }
  }

  // Unlock the remaining count times and then check it's unlocked. Then deallocate.
  // Go reverse order to correctly handle Handles.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    size_t index = kNumberOfLocks - 1 - i;
    size_t count = counts[index];
    while (count > 0) {
      test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
                    self);
      count--;
    }

    LockWord lock_after4 = objects[index]->GetLockWord(false);
    LockWord::LockState new_state4 = lock_after4.GetState();
    EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
                || LockWord::LockState::kFatLocked == new_state4);
  }

  // Test done.
#else
  UNUSED(test);
  LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, UnlockObject) {
  // This will lead to monitor error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  TestUnlockObject(this);
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_check_cast(void);
#endif

TEST_F(StubTest, CheckCast) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  const uintptr_t art_quick_check_cast = StubTest::GetEntrypoint(self, kQuickCheckCast);

  // Find some classes.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
  Handle<mirror::Class> c2(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  // TODO: Make the following work. But that would require correct managed frames.

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

#else
  LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


TEST_F(StubTest, APutObj) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  // Do not check non-checked ones, we'd need handlers and stuff...
  const uintptr_t art_quick_aput_obj_with_null_and_bound_check =
      StubTest::GetEntrypoint(self, kQuickAputObjectWithNullAndBoundCheck);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<5> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
  Handle<mirror::Class> ca(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  // Build a string array of size 10
  Handle<mirror::ObjectArray<mirror::Object>> array(
      hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));

  // Build a string -> should be assignable
  Handle<mirror::String> str_obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  // Build a generic object -> should fail assigning
  Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));

  // Play with it...

  // 1) Success cases
  // 1.1) Assign str_obj to array[0..3]

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(3));

  // 1.2) Assign null to array[0..3]

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(3));

  // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.

  // 2) Failure cases (str into str[])
  // 2.1) Array = null
  // TODO: Throwing NPE needs actual DEX code

//  Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
//          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
//
//  EXPECT_TRUE(self->IsExceptionPending());
//  self->ClearException();

  // 2.2) Index < 0

  Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
          reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 2.3) Index >= length
968
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700969 Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700970 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700971
972 EXPECT_TRUE(self->IsExceptionPending());
973 self->ClearException();
974
975 // 3) Failure cases (obj into str[])
976
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700977 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700978 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700979
980 EXPECT_TRUE(self->IsExceptionPending());
981 self->ClearException();
982
983 // Tests done.
984#else
985 LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
986 // Force-print to std::cout so it's also outside the logcat.
987 std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
988#endif
989}
990
Andreas Gampe00c1e6d2014-04-25 15:47:13 -0700991TEST_F(StubTest, AllocObject) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200992#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
993 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe369810a2015-01-14 19:53:31 -0800994 // This will lead to OOM error messages in the log.
995 ScopedLogSeverity sls(LogSeverity::FATAL);
996
Andreas Gampe00c1e6d2014-04-25 15:47:13 -0700997 // TODO: Check the "Unresolved" allocation stubs
998
999 Thread* self = Thread::Current();
1000 // Create an object
1001 ScopedObjectAccess soa(self);
1002 // garbage is created during ClassLinker::Init
1003
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001004 StackHandleScope<2> hs(soa.Self());
1005 Handle<mirror::Class> c(
1006 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001007
1008 // Play with it...
1009
1010 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001011 {
1012 // Use an arbitrary method from c to use as referrer
1013 size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()), // type_idx
Mathieu Chartiere401d142015-04-22 13:56:20 -07001014 // arbitrary
1015 reinterpret_cast<size_t>(c->GetVirtualMethod(0, sizeof(void*))),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001016 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001017 StubTest::GetEntrypoint(self, kQuickAllocObject),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001018 self);
1019
1020 EXPECT_FALSE(self->IsExceptionPending());
1021 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1022 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001023 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001024 VerifyObject(obj);
1025 }
1026
1027 {
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001028 // We can use null in the second argument as we do not need a method here (not used in
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001029 // resolved/initialized cases)
Mathieu Chartiere401d142015-04-22 13:56:20 -07001030 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001031 StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001032 self);
1033
1034 EXPECT_FALSE(self->IsExceptionPending());
1035 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1036 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001037 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001038 VerifyObject(obj);
1039 }
1040
1041 {
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001042 // We can use null in the second argument as we do not need a method here (not used in
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001043 // resolved/initialized cases)
Mathieu Chartiere401d142015-04-22 13:56:20 -07001044 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001045 StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001046 self);
1047
1048 EXPECT_FALSE(self->IsExceptionPending());
1049 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1050 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001051 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001052 VerifyObject(obj);
1053 }
1054
1055 // Failure tests.
1056
1057 // Out-of-memory.
1058 {
1059 Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);
1060
1061 // Array helps to fill memory faster.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001062 Handle<mirror::Class> ca(
1063 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
1064
1065 // Use arbitrary large amount for now.
1066 static const size_t kMaxHandles = 1000000;
Ian Rogers700a4022014-05-19 16:49:03 -07001067 std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001068
1069 std::vector<Handle<mirror::Object>> handles;
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001070 // Start allocating with 128K
1071 size_t length = 128 * KB / 4;
1072 while (length > 10) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001073 Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
1074 mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
1075 if (self->IsExceptionPending() || h.Get() == nullptr) {
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001076 self->ClearException();
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001077
1078 // Try a smaller length
1079 length = length / 8;
1080 // Use at most half the reported free space.
1081 size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
1082 if (length * 8 > mem) {
1083 length = mem / 8;
1084 }
1085 } else {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001086 handles.push_back(h);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001087 }
1088 }
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001089 LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001090
1091 // Allocate simple objects till it fails.
1092 while (!self->IsExceptionPending()) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001093 Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
1094 if (!self->IsExceptionPending() && h.Get() != nullptr) {
1095 handles.push_back(h);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001096 }
1097 }
1098 self->ClearException();
1099
Mathieu Chartiere401d142015-04-22 13:56:20 -07001100 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001101 StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001102 self);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001103 EXPECT_TRUE(self->IsExceptionPending());
1104 self->ClearException();
1105 EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001106 }
1107
1108 // Tests done.
1109#else
1110 LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
1111 // Force-print to std::cout so it's also outside the logcat.
1112 std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
1113#endif
1114}
1115
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001116TEST_F(StubTest, AllocObjectArray) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001117#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1118 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001119 // TODO: Check the "Unresolved" allocation stubs
1120
Andreas Gampe369810a2015-01-14 19:53:31 -08001121 // This will lead to OOM error messages in the log.
1122 ScopedLogSeverity sls(LogSeverity::FATAL);
1123
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001124 Thread* self = Thread::Current();
1125 // Create an object
1126 ScopedObjectAccess soa(self);
1127 // garbage is created during ClassLinker::Init
1128
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001129 StackHandleScope<2> hs(self);
1130 Handle<mirror::Class> c(
1131 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001132
1133 // Needed to have a linked method.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001134 Handle<mirror::Class> c_obj(
1135 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001136
1137 // Play with it...
1138
1139 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001140
1141 // For some reason this does not work, as the type_idx is artificial and outside what the
1142 // resolved types of c_obj allow...
1143
Ian Rogerscf7f1912014-10-22 22:06:39 -07001144 if ((false)) {
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001145 // Use an arbitrary method from c to use as referrer
1146 size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()), // type_idx
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001147 10U,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001148 // arbitrary
1149 reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0, sizeof(void*))),
Andreas Gampe29b38412014-08-13 00:15:43 -07001150 StubTest::GetEntrypoint(self, kQuickAllocArray),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001151 self);
1152
1153 EXPECT_FALSE(self->IsExceptionPending());
1154 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1155 mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001156 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001157 VerifyObject(obj);
1158 EXPECT_EQ(obj->GetLength(), 10);
1159 }
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001160
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001161 {
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001162 // We can use null in the second argument as we do not need a method here (not used in
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001163 // resolved/initialized cases)
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001164 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 10U,
1165 reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -07001166 StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001167 self);
Nicolas Geoffray14691c52015-03-05 10:40:17 +00001168 EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001169 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
1170 mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
1171 EXPECT_TRUE(obj->IsArrayInstance());
1172 EXPECT_TRUE(obj->IsObjectArray());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001173 EXPECT_EQ(c.Get(), obj->GetClass());
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001174 VerifyObject(obj);
1175 mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
1176 EXPECT_EQ(array->GetLength(), 10);
1177 }
1178
1179 // Failure tests.
1180
1181 // Out-of-memory.
1182 {
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001183 size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001184 GB, // that should fail...
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001185 reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -07001186 StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001187 self);
1188
1189 EXPECT_TRUE(self->IsExceptionPending());
1190 self->ClearException();
1191 EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
1192 }
1193
1194 // Tests done.
1195#else
1196 LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
1197 // Force-print to std::cout so it's also outside the logcat.
1198 std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
1199#endif
1200}
1201
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001202
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001203TEST_F(StubTest, StringCompareTo) {
Goran Jakovljevic801fcc42015-12-03 11:44:26 +01001204#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || \
1205 (defined(__mips__) && defined(__LP64__)) || (defined(__x86_64__) && !defined(__APPLE__))
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001206 // TODO: Check the "Unresolved" allocation stubs
1207
1208 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -07001209
1210 const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);
1211
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001212 ScopedObjectAccess soa(self);
1213 // garbage is created during ClassLinker::Init
1214
1215 // Create some strings
1216 // Use array so we can index into it and use a matrix for expected results
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001217 // Setup: A mix of empty, short and long strings; the long ones exercise the __memcmp16 paths.
1218 // TODO: Shared backing arrays.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001219 const char* c[] = { "", "", "a", "aa", "ab",
Serban Constantinescu86797a72014-06-19 16:17:56 +01001220 "aacaacaacaacaacaac", // This one's under the default limit to go to __memcmp16.
1221 "aacaacaacaacaacaacaacaacaacaacaacaac", // This one's over.
1222 "aacaacaacaacaacaacaacaacaacaacaacaaca" }; // As is this one. We need a separate one to
1223 // defeat object-equal optimizations.
Jeff Hao848f70a2014-01-15 13:49:50 -08001224 static constexpr size_t kStringCount = arraysize(c);
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001225
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001226 StackHandleScope<kStringCount> hs(self);
1227 Handle<mirror::String> s[kStringCount];
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001228
Jeff Hao848f70a2014-01-15 13:49:50 -08001229 for (size_t i = 0; i < kStringCount; ++i) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001230 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001231 }
1232
1233 // TODO: wide characters
1234
 1235 // Matrix of expectations. The first index is the first parameter. Note we only check against
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001236 // the sign, not the exact value, since only the sign of the result is specified; we compute the
 1237 // expected results up front and rely on String::CompareTo being correct.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001238 int32_t expected[kStringCount][kStringCount];
1239 for (size_t x = 0; x < kStringCount; ++x) {
1240 for (size_t y = 0; y < kStringCount; ++y) {
1241 expected[x][y] = s[x]->CompareTo(s[y].Get());
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001242 }
1243 }
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001244
1245 // Play with it...
1246
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001247 for (size_t x = 0; x < kStringCount; ++x) {
1248 for (size_t y = 0; y < kStringCount; ++y) {
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001249 // Test string_compareto x y
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001250 size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
1251 reinterpret_cast<size_t>(s[y].Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001252 art_quick_string_compareto, self);
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001253
1254 EXPECT_FALSE(self->IsExceptionPending());
1255
1256 // The result is a 32b signed integer
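      // (The union reinterprets the raw size_t returned by the stub as a signed 32-bit value.)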
1257 union {
1258 size_t r;
1259 int32_t i;
1260 } conv;
1261 conv.r = result;
1262 int32_t e = expected[x][y];
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001263 EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1264 conv.r;
1265 EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1266 conv.r;
1267 EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1268 conv.r;
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001269 }
1270 }
1271
Andreas Gampe7177d7c2014-05-02 12:10:02 -07001272 // TODO: Deallocate things.
1273
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001274 // Tests done.
1275#else
1276 LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
1277 // Force-print to std::cout so it's also outside the logcat.
1278 std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
1279 std::endl;
1280#endif
1281}
1282
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001283
Mathieu Chartierc7853442015-03-27 14:35:38 -07001284static void GetSetBooleanStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001285 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001286 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001287#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1288 (defined(__x86_64__) && !defined(__APPLE__))
Fred Shih37f05ef2014-07-16 18:38:08 -07001289 constexpr size_t num_values = 5;
1290 uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };
1291
1292 for (size_t i = 0; i < num_values; ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001293 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001294 static_cast<size_t>(values[i]),
1295 0U,
1296 StubTest::GetEntrypoint(self, kQuickSet8Static),
1297 self,
1298 referrer);
1299
Mathieu Chartierc7853442015-03-27 14:35:38 -07001300 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001301 0U, 0U,
1302 StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
1303 self,
1304 referrer);
 1305 // Booleans are currently stored as uint8_t; be zealous about asserting correct writes and reads.
1306 EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
1307 }
1308#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001309 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001310 LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
1311 // Force-print to std::cout so it's also outside the logcat.
1312 std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1313#endif
1314}
Mathieu Chartiere401d142015-04-22 13:56:20 -07001315static void GetSetByteStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Mathieu Chartierc7853442015-03-27 14:35:38 -07001316 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001317 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001318#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1319 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001320 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001321
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001322 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001323 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001324 static_cast<size_t>(values[i]),
1325 0U,
1326 StubTest::GetEntrypoint(self, kQuickSet8Static),
1327 self,
1328 referrer);
1329
Mathieu Chartierc7853442015-03-27 14:35:38 -07001330 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001331 0U, 0U,
1332 StubTest::GetEntrypoint(self, kQuickGetByteStatic),
1333 self,
1334 referrer);
1335 EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
1336 }
1337#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001338 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001339 LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
1340 // Force-print to std::cout so it's also outside the logcat.
1341 std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1342#endif
1343}
1344
1345
Mathieu Chartierc7853442015-03-27 14:35:38 -07001346static void GetSetBooleanInstance(Handle<mirror::Object>* obj, ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001347 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001348 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001349#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1350 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001351 uint8_t values[] = { 0, true, 2, 128, 0xFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001352
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001353 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001354 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001355 reinterpret_cast<size_t>(obj->Get()),
1356 static_cast<size_t>(values[i]),
1357 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1358 self,
1359 referrer);
1360
Mathieu Chartierc7853442015-03-27 14:35:38 -07001361 uint8_t res = f->GetBoolean(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001362 EXPECT_EQ(values[i], res) << "Iteration " << i;
1363
Mathieu Chartierc7853442015-03-27 14:35:38 -07001364 f->SetBoolean<false>(obj->Get(), res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001365
Mathieu Chartierc7853442015-03-27 14:35:38 -07001366 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001367 reinterpret_cast<size_t>(obj->Get()),
1368 0U,
1369 StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
1370 self,
1371 referrer);
1372 EXPECT_EQ(res, static_cast<uint8_t>(res2));
1373 }
1374#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001375 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001376 LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
1377 // Force-print to std::cout so it's also outside the logcat.
1378 std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1379#endif
1380}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001381static void GetSetByteInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001382 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001383 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001384#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1385 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001386 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001387
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001388 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001389 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001390 reinterpret_cast<size_t>(obj->Get()),
1391 static_cast<size_t>(values[i]),
1392 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1393 self,
1394 referrer);
1395
Mathieu Chartierc7853442015-03-27 14:35:38 -07001396 int8_t res = f->GetByte(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001397 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001398 f->SetByte<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001399
Mathieu Chartierc7853442015-03-27 14:35:38 -07001400 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001401 reinterpret_cast<size_t>(obj->Get()),
1402 0U,
1403 StubTest::GetEntrypoint(self, kQuickGetByteInstance),
1404 self,
1405 referrer);
1406 EXPECT_EQ(res, static_cast<int8_t>(res2));
1407 }
1408#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001409 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001410 LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
1411 // Force-print to std::cout so it's also outside the logcat.
1412 std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1413#endif
1414}
1415
Mathieu Chartiere401d142015-04-22 13:56:20 -07001416static void GetSetCharStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001417 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001418 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001419#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1420 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001421 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001422
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001423 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001424 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001425 static_cast<size_t>(values[i]),
1426 0U,
1427 StubTest::GetEntrypoint(self, kQuickSet16Static),
1428 self,
1429 referrer);
1430
Mathieu Chartierc7853442015-03-27 14:35:38 -07001431 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001432 0U, 0U,
1433 StubTest::GetEntrypoint(self, kQuickGetCharStatic),
1434 self,
1435 referrer);
1436
1437 EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
1438 }
1439#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001440 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001441 LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
1442 // Force-print to std::cout so it's also outside the logcat.
1443 std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1444#endif
1445}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001446static void GetSetShortStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001447 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001448 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001449#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1450 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001451 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001452
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001453 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001454 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001455 static_cast<size_t>(values[i]),
1456 0U,
1457 StubTest::GetEntrypoint(self, kQuickSet16Static),
1458 self,
1459 referrer);
1460
Mathieu Chartierc7853442015-03-27 14:35:38 -07001461 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001462 0U, 0U,
1463 StubTest::GetEntrypoint(self, kQuickGetShortStatic),
1464 self,
1465 referrer);
1466
1467 EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
1468 }
1469#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001470 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001471 LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
1472 // Force-print to std::cout so it's also outside the logcat.
1473 std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1474#endif
1475}
1476
Mathieu Chartierc7853442015-03-27 14:35:38 -07001477static void GetSetCharInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001478 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001479 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001480#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1481 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001482 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001483
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001484 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001485 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001486 reinterpret_cast<size_t>(obj->Get()),
1487 static_cast<size_t>(values[i]),
1488 StubTest::GetEntrypoint(self, kQuickSet16Instance),
1489 self,
1490 referrer);
1491
Mathieu Chartierc7853442015-03-27 14:35:38 -07001492 uint16_t res = f->GetChar(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001493 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001494 f->SetChar<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001495
Mathieu Chartierc7853442015-03-27 14:35:38 -07001496 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001497 reinterpret_cast<size_t>(obj->Get()),
1498 0U,
1499 StubTest::GetEntrypoint(self, kQuickGetCharInstance),
1500 self,
1501 referrer);
1502 EXPECT_EQ(res, static_cast<uint16_t>(res2));
1503 }
1504#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001505 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001506 LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
1507 // Force-print to std::cout so it's also outside the logcat.
1508 std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1509#endif
1510}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001511static void GetSetShortInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001512 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001513 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001514#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1515 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001516 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001517
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001518 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001519 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001520 reinterpret_cast<size_t>(obj->Get()),
1521 static_cast<size_t>(values[i]),
1522 StubTest::GetEntrypoint(self, kQuickSet16Instance),
1523 self,
1524 referrer);
1525
Mathieu Chartierc7853442015-03-27 14:35:38 -07001526 int16_t res = f->GetShort(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001527 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001528 f->SetShort<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001529
Mathieu Chartierc7853442015-03-27 14:35:38 -07001530 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001531 reinterpret_cast<size_t>(obj->Get()),
1532 0U,
1533 StubTest::GetEntrypoint(self, kQuickGetShortInstance),
1534 self,
1535 referrer);
1536 EXPECT_EQ(res, static_cast<int16_t>(res2));
1537 }
1538#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001539 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001540 LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
1541 // Force-print to std::cout so it's also outside the logcat.
1542 std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1543#endif
1544}
1545
Mathieu Chartiere401d142015-04-22 13:56:20 -07001546static void GetSet32Static(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001547 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001548 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001549#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1550 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001551 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001552
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001553 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001554 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001555 static_cast<size_t>(values[i]),
1556 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001557 StubTest::GetEntrypoint(self, kQuickSet32Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001558 self,
1559 referrer);
1560
Mathieu Chartierc7853442015-03-27 14:35:38 -07001561 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001562 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001563 StubTest::GetEntrypoint(self, kQuickGet32Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001564 self,
1565 referrer);
1566
Goran Jakovljevic04568812015-04-23 15:27:23 +02001567#if defined(__mips__) && defined(__LP64__)
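    // Assumed rationale: MIPS64 keeps 32-bit values sign-extended in 64-bit registers, so truncate
    // the returned size_t back to uint32_t before comparing.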
1568 EXPECT_EQ(static_cast<uint32_t>(res), values[i]) << "Iteration " << i;
1569#else
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001570 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Goran Jakovljevic04568812015-04-23 15:27:23 +02001571#endif
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001572 }
1573#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001574 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001575 LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
1576 // Force-print to std::cout so it's also outside the logcat.
1577 std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
1578#endif
1579}
1580
1581
Mathieu Chartierc7853442015-03-27 14:35:38 -07001582static void GetSet32Instance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001583 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001584 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001585#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1586 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001587 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001588
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001589 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001590 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001591 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001592 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001593 StubTest::GetEntrypoint(self, kQuickSet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001594 self,
1595 referrer);
1596
Mathieu Chartierc7853442015-03-27 14:35:38 -07001597 int32_t res = f->GetInt(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001598 EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;
1599
1600 res++;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001601 f->SetInt<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001602
Mathieu Chartierc7853442015-03-27 14:35:38 -07001603 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001604 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001605 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001606 StubTest::GetEntrypoint(self, kQuickGet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001607 self,
1608 referrer);
1609 EXPECT_EQ(res, static_cast<int32_t>(res2));
1610 }
1611#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001612 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001613 LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
1614 // Force-print to std::cout so it's also outside the logcat.
1615 std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1616#endif
1617}
1618
1619
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001620#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1621 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001622
1623static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001624 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001625 SHARED_REQUIRES(Locks::mutator_lock_) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001626 test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1627 reinterpret_cast<size_t>(val),
1628 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001629 StubTest::GetEntrypoint(self, kQuickSetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001630 self,
1631 referrer);
1632
1633 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1634 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001635 StubTest::GetEntrypoint(self, kQuickGetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001636 self,
1637 referrer);
1638
1639 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1640}
1641#endif
1642
Mathieu Chartiere401d142015-04-22 13:56:20 -07001643static void GetSetObjStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001644 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001645 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001646#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1647 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierc7853442015-03-27 14:35:38 -07001648 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001649
1650 // Allocate a string object for simplicity.
1651 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartierc7853442015-03-27 14:35:38 -07001652 set_and_check_static(f->GetDexFieldIndex(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001653
Mathieu Chartierc7853442015-03-27 14:35:38 -07001654 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001655#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001656 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001657 LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
1658 // Force-print to std::cout so it's also outside the logcat.
1659 std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
1660#endif
1661}
1662
1663
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001664#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1665 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierc7853442015-03-27 14:35:38 -07001666static void set_and_check_instance(ArtField* f, mirror::Object* trg,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001667 mirror::Object* val, Thread* self, ArtMethod* referrer,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001668 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001669 SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001670 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001671 reinterpret_cast<size_t>(trg),
1672 reinterpret_cast<size_t>(val),
Andreas Gampe29b38412014-08-13 00:15:43 -07001673 StubTest::GetEntrypoint(self, kQuickSetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001674 self,
1675 referrer);
1676
Mathieu Chartierc7853442015-03-27 14:35:38 -07001677 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001678 reinterpret_cast<size_t>(trg),
1679 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001680 StubTest::GetEntrypoint(self, kQuickGetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001681 self,
1682 referrer);
1683
1684 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1685
Mathieu Chartierc7853442015-03-27 14:35:38 -07001686 EXPECT_EQ(val, f->GetObj(trg));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001687}
1688#endif
1689
Mathieu Chartierc7853442015-03-27 14:35:38 -07001690static void GetSetObjInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001691 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001692 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001693#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1694 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001695 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001696
1697 // Allocate a string object for simplicity.
1698 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001699 set_and_check_instance(f, obj->Get(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001700
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001701 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001702#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001703 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001704 LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
1705 // Force-print to std::cout so it's also outside the logcat.
1706 std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
1707#endif
1708}
1709
1710
Calin Juravle872ab3f2015-10-02 07:27:51 +01001711// TODO: Complete these tests for 32b architectures
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001712
Mathieu Chartiere401d142015-04-22 13:56:20 -07001713static void GetSet64Static(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001714 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001715 SHARED_REQUIRES(Locks::mutator_lock_) {
Calin Juravle6e399ac2015-10-02 23:56:06 +01001716#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) \
1717 || defined(__aarch64__)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001718 uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001719
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001720 for (size_t i = 0; i < arraysize(values); ++i) {
Calin Juravle6e399ac2015-10-02 23:56:06 +01001721 // 64 bit FieldSet stores the set value in the second register.
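    // Hence arg1 is passed as 0 here and the 64-bit value goes in the third argument of
    // Invoke3WithReferrer.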
1722 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Calin Juravle24cc1b32015-10-06 11:46:58 +01001723 0U,
1724 values[i],
1725 StubTest::GetEntrypoint(self, kQuickSet64Static),
1726 self,
1727 referrer);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001728
Mathieu Chartierc7853442015-03-27 14:35:38 -07001729 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001730 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001731 StubTest::GetEntrypoint(self, kQuickGet64Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001732 self,
1733 referrer);
1734
1735 EXPECT_EQ(res, values[i]) << "Iteration " << i;
1736 }
1737#else
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001738 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001739 LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
1740 // Force-print to std::cout so it's also outside the logcat.
1741 std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
1742#endif
1743}
1744
1745
Mathieu Chartierc7853442015-03-27 14:35:38 -07001746static void GetSet64Instance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001747 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001748 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001749#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) || \
1750 defined(__aarch64__)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001751 uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001752
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001753 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001754 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001755 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001756 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001757 StubTest::GetEntrypoint(self, kQuickSet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001758 self,
1759 referrer);
1760
Mathieu Chartierc7853442015-03-27 14:35:38 -07001761 int64_t res = f->GetLong(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001762 EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;
1763
1764 res++;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001765 f->SetLong<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001766
Mathieu Chartierc7853442015-03-27 14:35:38 -07001767 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001768 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001769 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001770 StubTest::GetEntrypoint(self, kQuickGet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001771 self,
1772 referrer);
1773 EXPECT_EQ(res, static_cast<int64_t>(res2));
1774 }
1775#else
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001776 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001777 LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
1778 // Force-print to std::cout so it's also outside the logcat.
1779 std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1780#endif
1781}
1782
1783static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
1784 // garbage is created during ClassLinker::Init
1785
1786 JNIEnv* env = Thread::Current()->GetJniEnv();
1787 jclass jc = env->FindClass("AllFields");
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001788 CHECK(jc != nullptr);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001789 jobject o = env->AllocObject(jc);
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001790 CHECK(o != nullptr);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001791
1792 ScopedObjectAccess soa(self);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001793 StackHandleScope<3> hs(self);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001794 Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
1795 Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001796 // Need a method as a referrer
Mathieu Chartiere401d142015-04-22 13:56:20 -07001797 ArtMethod* m = c->GetDirectMethod(0, sizeof(void*));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001798
1799 // Play with it...
1800
1801 // Static fields.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001802 for (ArtField& f : c->GetSFields()) {
1803 Primitive::Type type = f.GetTypeAsPrimitiveType();
Mathieu Chartierc7853442015-03-27 14:35:38 -07001804 if (test_type != type) {
1805 continue;
1806 }
1807 switch (type) {
1808 case Primitive::Type::kPrimBoolean:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001809 GetSetBooleanStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001810 break;
1811 case Primitive::Type::kPrimByte:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001812 GetSetByteStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001813 break;
1814 case Primitive::Type::kPrimChar:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001815 GetSetCharStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001816 break;
1817 case Primitive::Type::kPrimShort:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001818 GetSetShortStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001819 break;
1820 case Primitive::Type::kPrimInt:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001821 GetSet32Static(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001822 break;
1823 case Primitive::Type::kPrimLong:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001824 GetSet64Static(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001825 break;
1826 case Primitive::Type::kPrimNot:
1827 // Don't try array.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001828 if (f.GetTypeDescriptor()[0] != '[') {
1829 GetSetObjStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001830 }
1831 break;
1832 default:
1833 break; // Skip.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001834 }
1835 }
1836
1837 // Instance fields.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001838 for (ArtField& f : c->GetIFields()) {
1839 Primitive::Type type = f.GetTypeAsPrimitiveType();
Mathieu Chartierc7853442015-03-27 14:35:38 -07001840 if (test_type != type) {
1841 continue;
1842 }
1843 switch (type) {
1844 case Primitive::Type::kPrimBoolean:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001845 GetSetBooleanInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001846 break;
1847 case Primitive::Type::kPrimByte:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001848 GetSetByteInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001849 break;
1850 case Primitive::Type::kPrimChar:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001851 GetSetCharInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001852 break;
1853 case Primitive::Type::kPrimShort:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001854 GetSetShortInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001855 break;
1856 case Primitive::Type::kPrimInt:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001857 GetSet32Instance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001858 break;
1859 case Primitive::Type::kPrimLong:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001860 GetSet64Instance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001861 break;
1862 case Primitive::Type::kPrimNot:
1863 // Don't try array.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001864 if (f.GetTypeDescriptor()[0] != '[') {
1865 GetSetObjInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001866 }
1867 break;
1868 default:
1869 break; // Skip.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001870 }
1871 }
1872
1873 // TODO: Deallocate things.
1874}
1875
Fred Shih37f05ef2014-07-16 18:38:08 -07001876TEST_F(StubTest, Fields8) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001877 Thread* self = Thread::Current();
1878
1879 self->TransitionFromSuspendedToRunnable();
1880 LoadDex("AllFields");
1881 bool started = runtime_->Start();
1882 CHECK(started);
1883
1884 TestFields(self, this, Primitive::Type::kPrimBoolean);
1885 TestFields(self, this, Primitive::Type::kPrimByte);
1886}
1887
1888TEST_F(StubTest, Fields16) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001889 Thread* self = Thread::Current();
1890
1891 self->TransitionFromSuspendedToRunnable();
1892 LoadDex("AllFields");
1893 bool started = runtime_->Start();
1894 CHECK(started);
1895
1896 TestFields(self, this, Primitive::Type::kPrimChar);
1897 TestFields(self, this, Primitive::Type::kPrimShort);
1898}
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001899
1900TEST_F(StubTest, Fields32) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001901 Thread* self = Thread::Current();
1902
1903 self->TransitionFromSuspendedToRunnable();
1904 LoadDex("AllFields");
1905 bool started = runtime_->Start();
1906 CHECK(started);
1907
1908 TestFields(self, this, Primitive::Type::kPrimInt);
1909}
1910
1911TEST_F(StubTest, FieldsObj) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001912 Thread* self = Thread::Current();
1913
1914 self->TransitionFromSuspendedToRunnable();
1915 LoadDex("AllFields");
1916 bool started = runtime_->Start();
1917 CHECK(started);
1918
1919 TestFields(self, this, Primitive::Type::kPrimNot);
1920}
1921
1922TEST_F(StubTest, Fields64) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001923 Thread* self = Thread::Current();
1924
1925 self->TransitionFromSuspendedToRunnable();
1926 LoadDex("AllFields");
1927 bool started = runtime_->Start();
1928 CHECK(started);
1929
1930 TestFields(self, this, Primitive::Type::kPrimLong);
1931}
1932
Andreas Gampe51f76352014-05-21 08:28:48 -07001933TEST_F(StubTest, IMT) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001934#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1935 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe51f76352014-05-21 08:28:48 -07001936 Thread* self = Thread::Current();
1937
1938 ScopedObjectAccess soa(self);
1939 StackHandleScope<7> hs(self);
1940
1941 JNIEnv* env = Thread::Current()->GetJniEnv();
1942
1943 // ArrayList
1944
1945 // Load ArrayList and used methods (JNI).
1946 jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
1947 ASSERT_NE(nullptr, arraylist_jclass);
1948 jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
1949 ASSERT_NE(nullptr, arraylist_constructor);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001950 jmethodID contains_jmethod = env->GetMethodID(
1951 arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
Andreas Gampe51f76352014-05-21 08:28:48 -07001952 ASSERT_NE(nullptr, contains_jmethod);
1953 jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
1954 ASSERT_NE(nullptr, add_jmethod);
1955
Mathieu Chartiere401d142015-04-22 13:56:20 -07001956 // Get representation.
1957 ArtMethod* contains_amethod = soa.DecodeMethod(contains_jmethod);
Andreas Gampe51f76352014-05-21 08:28:48 -07001958
1959 // Patch up ArrayList.contains.
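  // (Assumption on intent: an interpreted method may have no compiled code here; pointing
  // contains() at the quick-to-interpreter bridge gives the IMT conflict trampoline a valid quick
  // entrypoint to dispatch to.)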
Mathieu Chartiere401d142015-04-22 13:56:20 -07001960 if (contains_amethod->GetEntryPointFromQuickCompiledCode() == nullptr) {
1961 contains_amethod->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
Andreas Gampe29b38412014-08-13 00:15:43 -07001962 StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
Andreas Gampe51f76352014-05-21 08:28:48 -07001963 }
1964
1965 // List
1966
1967 // Load List and used methods (JNI).
1968 jclass list_jclass = env->FindClass("java/util/List");
1969 ASSERT_NE(nullptr, list_jclass);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001970 jmethodID inf_contains_jmethod = env->GetMethodID(
1971 list_jclass, "contains", "(Ljava/lang/Object;)Z");
Andreas Gampe51f76352014-05-21 08:28:48 -07001972 ASSERT_NE(nullptr, inf_contains_jmethod);
1973
 1974 // Get representation.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001975 ArtMethod* inf_contains = soa.DecodeMethod(inf_contains_jmethod);
Andreas Gampe51f76352014-05-21 08:28:48 -07001976
1977 // Object
1978
1979 jclass obj_jclass = env->FindClass("java/lang/Object");
1980 ASSERT_NE(nullptr, obj_jclass);
1981 jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
1982 ASSERT_NE(nullptr, obj_constructor);
1983
Andreas Gampe51f76352014-05-21 08:28:48 -07001984 // Create instances.
1985
1986 jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
1987 ASSERT_NE(nullptr, jarray_list);
1988 Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object*>(jarray_list)));
1989
1990 jobject jobj = env->NewObject(obj_jclass, obj_constructor);
1991 ASSERT_NE(nullptr, jobj);
1992 Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(jobj)));
1993
Andreas Gampe1a7e2922014-05-21 15:37:53 -07001994 // Invocation tests.
1995
1996 // 1. imt_conflict
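  // Call List.contains() through the IMT conflict trampoline; the hidden argument carries the
  // interface method's dex method index, which the trampoline uses to resolve the actual target.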
1997
1998 // Contains.
Andreas Gampe51f76352014-05-21 08:28:48 -07001999
2000 size_t result =
2001 Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
2002 reinterpret_cast<size_t>(obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -07002003 StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
Mathieu Chartiere401d142015-04-22 13:56:20 -07002004 self, contains_amethod,
2005 static_cast<size_t>(inf_contains->GetDexMethodIndex()));
Andreas Gampe51f76352014-05-21 08:28:48 -07002006
2007 ASSERT_FALSE(self->IsExceptionPending());
2008 EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
2009
2010 // Add object.
2011
2012 env->CallBooleanMethod(jarray_list, add_jmethod, jobj);
2013
Nicolas Geoffray14691c52015-03-05 10:40:17 +00002014 ASSERT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());
Andreas Gampe51f76352014-05-21 08:28:48 -07002015
Andreas Gampe1a7e2922014-05-21 15:37:53 -07002016 // Contains.
Andreas Gampe51f76352014-05-21 08:28:48 -07002017
Mathieu Chartiere401d142015-04-22 13:56:20 -07002018 result = Invoke3WithReferrerAndHidden(
2019 0U, reinterpret_cast<size_t>(array_list.Get()), reinterpret_cast<size_t>(obj.Get()),
2020 StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline), self, contains_amethod,
2021 static_cast<size_t>(inf_contains->GetDexMethodIndex()));
Andreas Gampe51f76352014-05-21 08:28:48 -07002022
2023 ASSERT_FALSE(self->IsExceptionPending());
2024 EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);
Andreas Gampe1a7e2922014-05-21 15:37:53 -07002025
2026 // 2. regular interface trampoline
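  // Here the interface method's dex method index is passed as the first argument instead, and no
  // hidden argument is needed.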
2027
Mathieu Chartiere401d142015-04-22 13:56:20 -07002028 result = Invoke3WithReferrer(static_cast<size_t>(inf_contains->GetDexMethodIndex()),
Andreas Gampe1a7e2922014-05-21 15:37:53 -07002029 reinterpret_cast<size_t>(array_list.Get()),
2030 reinterpret_cast<size_t>(obj.Get()),
2031 StubTest::GetEntrypoint(self,
2032 kQuickInvokeInterfaceTrampolineWithAccessCheck),
Mathieu Chartiere401d142015-04-22 13:56:20 -07002033 self, contains_amethod);
Andreas Gampe1a7e2922014-05-21 15:37:53 -07002034
2035 ASSERT_FALSE(self->IsExceptionPending());
2036 EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);
2037
Mathieu Chartiere401d142015-04-22 13:56:20 -07002038 result = Invoke3WithReferrer(
2039 static_cast<size_t>(inf_contains->GetDexMethodIndex()),
2040 reinterpret_cast<size_t>(array_list.Get()), reinterpret_cast<size_t>(array_list.Get()),
2041 StubTest::GetEntrypoint(self, kQuickInvokeInterfaceTrampolineWithAccessCheck), self,
2042 contains_amethod);
Andreas Gampe1a7e2922014-05-21 15:37:53 -07002043
2044 ASSERT_FALSE(self->IsExceptionPending());
2045 EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
Andreas Gampe51f76352014-05-21 08:28:48 -07002046#else
Andreas Gampe6aac3552014-06-09 14:55:53 -07002047 LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
Andreas Gampe51f76352014-05-21 08:28:48 -07002048 // Force-print to std::cout so it's also outside the logcat.
Andreas Gampe6aac3552014-06-09 14:55:53 -07002049 std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
2050#endif
2051}
2052
Andreas Gampe6aac3552014-06-09 14:55:53 -07002053TEST_F(StubTest, StringIndexOf) {
Goran Jakovljevic801fcc42015-12-03 11:44:26 +01002054#if defined(__arm__) || defined(__aarch64__) || (defined(__mips__) && defined(__LP64__))
Andreas Gampe6aac3552014-06-09 14:55:53 -07002055 Thread* self = Thread::Current();
2056 ScopedObjectAccess soa(self);
2057 // garbage is created during ClassLinker::Init
2058
2059 // Create some strings
2060 // Use array so we can index into it and use a matrix for expected results
 2061 // Setup: Strings of increasing length so the searched-for characters occur at different positions.
2062 // TODO: Shared backing arrays.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002063 const char* c_str[] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
2064 static constexpr size_t kStringCount = arraysize(c_str);
2065 const char c_char[] = { 'a', 'b', 'c', 'd', 'e' };
2066 static constexpr size_t kCharCount = arraysize(c_char);
Andreas Gampe6aac3552014-06-09 14:55:53 -07002067
2068 StackHandleScope<kStringCount> hs(self);
2069 Handle<mirror::String> s[kStringCount];
2070
2071 for (size_t i = 0; i < kStringCount; ++i) {
2072 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
2073 }
2074
 2075 // Matrix of expectations. The first index is the first parameter. Unlike the CompareTo test we
 2076 // check the exact result here; we compute the expected values up front and rely on
 2077 // String::FastIndexOf being correct.
2078 static constexpr size_t kMaxLen = 9;
2079 DCHECK_LE(strlen(c_str[kStringCount-1]), kMaxLen) << "Please fix the indexof test.";
2080
2081 // Last dimension: start, offset by 1.
2082 int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
2083 for (size_t x = 0; x < kStringCount; ++x) {
2084 for (size_t y = 0; y < kCharCount; ++y) {
2085 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2086 expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
2087 }
2088 }
2089 }
2090
2091 // Play with it...
2092
2093 for (size_t x = 0; x < kStringCount; ++x) {
2094 for (size_t y = 0; y < kCharCount; ++y) {
2095 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2096 int32_t start = static_cast<int32_t>(z) - 1;
2097
2098 // Test string_compareto x y
2099 size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
Andreas Gampe29b38412014-08-13 00:15:43 -07002100 StubTest::GetEntrypoint(self, kQuickIndexOf), self);
Andreas Gampe6aac3552014-06-09 14:55:53 -07002101
2102 EXPECT_FALSE(self->IsExceptionPending());
2103
2104 // The result is a 32b signed integer
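        // (As in the CompareTo test, reinterpret the raw size_t return value as a signed 32-bit
        // integer.)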
2105 union {
2106 size_t r;
2107 int32_t i;
2108 } conv;
2109 conv.r = result;
2110
2111 EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
2112 c_char[y] << " @ " << start;
2113 }
2114 }
2115 }
2116
2117 // TODO: Deallocate things.
2118
2119 // Tests done.
2120#else
2121 LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
2122 // Force-print to std::cout so it's also outside the logcat.
2123 std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe51f76352014-05-21 08:28:48 -07002124#endif
2125}
2126
Man Cao1aee9002015-07-14 22:31:42 -07002127TEST_F(StubTest, ReadBarrier) {
2128#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
2129 defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
2130 Thread* self = Thread::Current();
2131
2132 const uintptr_t readBarrierSlow = StubTest::GetEntrypoint(self, kQuickReadBarrierSlow);
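  // The slow-path stub is invoked as (ref, obj, offset) and should return the reference read from
  // obj at that offset; below it is used to read obj's class pointer.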
2133
2134 // Create an object
2135 ScopedObjectAccess soa(self);
2136 // garbage is created during ClassLinker::Init
2137
2138 StackHandleScope<2> hs(soa.Self());
2139 Handle<mirror::Class> c(
2140 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
2141
2142 // Build an object instance
2143 Handle<mirror::Object> obj(hs.NewHandle(c->AllocObject(soa.Self())));
2144
2145 EXPECT_FALSE(self->IsExceptionPending());
2146
2147 size_t result = Invoke3(0U, reinterpret_cast<size_t>(obj.Get()),
2148 mirror::Object::ClassOffset().SizeValue(), readBarrierSlow, self);
2149
2150 EXPECT_FALSE(self->IsExceptionPending());
2151 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
2152 mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
2153 EXPECT_EQ(klass, obj->GetClass());
2154
2155 // Tests done.
2156#else
2157 LOG(INFO) << "Skipping read_barrier_slow";
2158 // Force-print to std::cout so it's also outside the logcat.
2159 std::cout << "Skipping read_barrier_slow" << std::endl;
2160#endif
2161}
2162
Roland Levillain0d5a2812015-11-13 10:07:31 +00002163TEST_F(StubTest, ReadBarrierForRoot) {
2164#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
2165 defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
2166 Thread* self = Thread::Current();
2167
2168 const uintptr_t readBarrierForRootSlow =
2169 StubTest::GetEntrypoint(self, kQuickReadBarrierForRootSlow);
2170
2171 // Create an object
2172 ScopedObjectAccess soa(self);
2173 // garbage is created during ClassLinker::Init
2174
2175 StackHandleScope<1> hs(soa.Self());
2176
2177 Handle<mirror::String> obj(
2178 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
2179
2180 EXPECT_FALSE(self->IsExceptionPending());
2181
2182 GcRoot<mirror::Class>& root = mirror::String::java_lang_String_;
2183 size_t result = Invoke3(reinterpret_cast<size_t>(&root), 0U, 0U, readBarrierForRootSlow, self);
2184
2185 EXPECT_FALSE(self->IsExceptionPending());
2186 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
2187 mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
2188 EXPECT_EQ(klass, obj->GetClass());
2189
2190 // Tests done.
2191#else
2192 LOG(INFO) << "Skipping read_barrier_for_root_slow";
2193 // Force-print to std::cout so it's also outside the logcat.
2194 std::cout << "Skipping read_barrier_for_root_slow" << std::endl;
2195#endif
2196}
2197
Andreas Gampe525cde22014-04-22 15:44:50 -07002198} // namespace art