/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <cstdio>

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "class_linker-inl.h"
#include "common_runtime_test.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "mirror/class-inl.h"
#include "mirror/string-inl.h"
#include "scoped_thread_state_change.h"

namespace art {


class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(), type);
        }
      }
    }
  }

  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             ArtMethod* referrer) {
    return Invoke3WithReferrerAndHidden(arg0, arg1, arg2, code, self, referrer, 0);
  }

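  // Invoke3WithReferrerAndHidden marshals arg0-arg2, the stub's code pointer, the current
  // Thread* and an extra "hidden" argument into the registers the quick stubs expect, using
  // per-architecture inline assembly, then calls the stub and returns whatever ended up in the
  // architecture's return register. The hidden argument only matters for stubs that consume one
  // (for example an IMT-conflict-style trampoline); other callers simply pass 0.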
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
#define PUSH(reg) "push " # reg "\n\t .cfi_adjust_cfa_offset 4\n\t"
#define POP(reg) "pop " # reg "\n\t .cfi_adjust_cfa_offset -4\n\t"
    __asm__ __volatile__(
        "movd %[hidden], %%xmm7\n\t"  // This is a memory op, so do this early. If it is off of
                                      // esp, then we won't be able to access it after spilling.

        // Spill 6 registers.
        PUSH(%%ebx)
        PUSH(%%ecx)
        PUSH(%%edx)
        PUSH(%%esi)
        PUSH(%%edi)
        PUSH(%%ebp)

        // Store the inputs to the stack, but keep the referrer up top, less work.
        PUSH(%[referrer])  // Align stack.
        PUSH(%[referrer])  // Store referrer

        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[code])
        // Now read them back into the required registers.
        POP(%%edi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%eax)
        // Call is prepared now.

        "call *%%edi\n\t"           // Call the stub
        "addl $8, %%esp\n\t"        // Pop referrer and padding.
        ".cfi_adjust_cfa_offset -8\n\t"

        // Restore 6 registers.
        POP(%%ebp)
        POP(%%edi)
        POP(%%esi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%ebx)

        : "=a" (result)
          // Use the result from eax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer]"r"(referrer), [hidden]"m"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory", "xmm7");  // clobber.
#undef PUSH
#undef POP
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop null and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "r0", "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"    // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x19, x17, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"              // Call the stub
        "mov x8, x0\n\t"          // Store result
        "add sp, sp, #16\n\t"     // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"          // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"    // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"     // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t"  // Store the FPR comparison result
        "mov %[result], x8\n\t"      // Store the call result

        "b 3f\n\t"                   // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                   // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__mips__) && !defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a3 and t0-t7 which we say we don't clobber. May contain args.
        "addiu $sp, $sp, -64\n\t"
        "sw $a0, 0($sp)\n\t"
        "sw $a1, 4($sp)\n\t"
        "sw $a2, 8($sp)\n\t"
        "sw $a3, 12($sp)\n\t"
        "sw $t0, 16($sp)\n\t"
        "sw $t1, 20($sp)\n\t"
        "sw $t2, 24($sp)\n\t"
        "sw $t3, 28($sp)\n\t"
        "sw $t4, 32($sp)\n\t"
        "sw $t5, 36($sp)\n\t"
        "sw $t6, 40($sp)\n\t"
        "sw $t7, 44($sp)\n\t"
        // Spill gp register since it is caller save.
        "sw $gp, 52($sp)\n\t"

        "addiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sw %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "addiu $sp, $sp, -24\n\t"
        "sw %[arg0], 0($sp)\n\t"
        "sw %[arg1], 4($sp)\n\t"
        "sw %[arg2], 8($sp)\n\t"
        "sw %[code], 12($sp)\n\t"
        "sw %[self], 16($sp)\n\t"
        "sw %[hidden], 20($sp)\n\t"

        // Load call params into the right registers.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $t9, 12($sp)\n\t"
        "lw $s1, 16($sp)\n\t"
        "lw $t0, 20($sp)\n\t"
        "addiu $sp, $sp, 24\n\t"

        "jalr $t9\n\t"             // Call the stub.
        "nop\n\t"
        "addiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $a3, 12($sp)\n\t"
        "lw $t0, 16($sp)\n\t"
        "lw $t1, 20($sp)\n\t"
        "lw $t2, 24($sp)\n\t"
        "lw $t3, 28($sp)\n\t"
        "lw $t4, 32($sp)\n\t"
        "lw $t5, 36($sp)\n\t"
        "lw $t6, 40($sp)\n\t"
        "lw $t7, 44($sp)\n\t"
        // Restore gp.
        "lw $gp, 52($sp)\n\t"
        "addiu $sp, $sp, 64\n\t"   // Free stack space, now sp as on entry.

        "move %[result], $v0\n\t"  // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "at", "v0", "v1", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "t8", "t9", "k0", "k1",
          "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__mips__) && defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a7 which we say we don't clobber. May contain args.
        "daddiu $sp, $sp, -64\n\t"
        "sd $a0, 0($sp)\n\t"
        "sd $a1, 8($sp)\n\t"
        "sd $a2, 16($sp)\n\t"
        "sd $a3, 24($sp)\n\t"
        "sd $a4, 32($sp)\n\t"
        "sd $a5, 40($sp)\n\t"
        "sd $a6, 48($sp)\n\t"
        "sd $a7, 56($sp)\n\t"

        "daddiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sd %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "daddiu $sp, $sp, -48\n\t"
        "sd %[arg0], 0($sp)\n\t"
        "sd %[arg1], 8($sp)\n\t"
        "sd %[arg2], 16($sp)\n\t"
        "sd %[code], 24($sp)\n\t"
        "sd %[self], 32($sp)\n\t"
        "sd %[hidden], 40($sp)\n\t"

        // Load call params into the right registers.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $t9, 24($sp)\n\t"
        "ld $s1, 32($sp)\n\t"
        "ld $t0, 40($sp)\n\t"
        "daddiu $sp, $sp, 48\n\t"

        "jalr $t9\n\t"              // Call the stub.
        "nop\n\t"
        "daddiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $a3, 24($sp)\n\t"
        "ld $a4, 32($sp)\n\t"
        "ld $a5, 40($sp)\n\t"
        "ld $a6, 48($sp)\n\t"
        "ld $a7, 56($sp)\n\t"
        "daddiu $sp, $sp, 64\n\t"

        "move %[result], $v0\n\t"   // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        // Instead of the aliases t0-t3, the register names $12-$15 are used in the clobber list
        // because t0-t3 are ambiguous.
434 : "at", "v0", "v1", "$12", "$13", "$14", "$15", "s0", "s1", "s2", "s3", "s4", "s5", "s6",
435 "s7", "t8", "t9", "k0", "k1", "fp", "ra",
Goran Jakovljevic4d44e532015-11-27 11:20:20 +0100436 "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
437 "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
438 "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200439 "memory"); // clobber.
Andreas Gampe9537ba22015-10-12 14:29:38 -0700440#elif defined(__x86_64__) && !defined(__APPLE__)
441#define PUSH(reg) "pushq " # reg "\n\t .cfi_adjust_cfa_offset 8\n\t"
442#define POP(reg) "popq " # reg "\n\t .cfi_adjust_cfa_offset -8\n\t"
443 // Note: Uses the native convention. We do a callee-save regimen by manually spilling and
444 // restoring almost all registers.
Andreas Gampe51f76352014-05-21 08:28:48 -0700445 // TODO: Set the thread?
446 __asm__ __volatile__(
Andreas Gampe9537ba22015-10-12 14:29:38 -0700447 // Spill almost everything (except rax, rsp). 14 registers.
448 PUSH(%%rbx)
449 PUSH(%%rcx)
450 PUSH(%%rdx)
451 PUSH(%%rsi)
452 PUSH(%%rdi)
453 PUSH(%%rbp)
454 PUSH(%%r8)
455 PUSH(%%r9)
456 PUSH(%%r10)
457 PUSH(%%r11)
458 PUSH(%%r12)
459 PUSH(%%r13)
460 PUSH(%%r14)
461 PUSH(%%r15)
462
463 PUSH(%[referrer]) // Push referrer & 16B alignment padding
464 PUSH(%[referrer])
465
466 // Now juggle the input registers.
467 PUSH(%[arg0])
468 PUSH(%[arg1])
469 PUSH(%[arg2])
470 PUSH(%[hidden])
471 PUSH(%[code])
472 POP(%%r8)
473 POP(%%rax)
474 POP(%%rdx)
475 POP(%%rsi)
476 POP(%%rdi)
477
478 "call *%%r8\n\t" // Call the stub
479 "addq $16, %%rsp\n\t" // Pop null and padding
Andreas Gampe51f76352014-05-21 08:28:48 -0700480 ".cfi_adjust_cfa_offset -16\n\t"
Andreas Gampe9537ba22015-10-12 14:29:38 -0700481
482 POP(%%r15)
483 POP(%%r14)
484 POP(%%r13)
485 POP(%%r12)
486 POP(%%r11)
487 POP(%%r10)
488 POP(%%r9)
489 POP(%%r8)
490 POP(%%rbp)
491 POP(%%rdi)
492 POP(%%rsi)
493 POP(%%rdx)
494 POP(%%rcx)
495 POP(%%rbx)
496
Andreas Gampe51f76352014-05-21 08:28:48 -0700497 : "=a" (result)
498 // Use the result from rax
Andreas Gampe9537ba22015-10-12 14:29:38 -0700499 : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
500 [referrer] "r"(referrer), [hidden] "r"(hidden)
501 // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into some other
502 // register. We can't use "b" (rbx), as ASAN uses this for the frame pointer.
503 : "memory"); // We spill and restore (almost) all registers, so only mention memory here.
504#undef PUSH
505#undef POP
Andreas Gampe51f76352014-05-21 08:28:48 -0700506#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -0800507 UNUSED(arg0, arg1, arg2, code, referrer, hidden);
Andreas Gampe51f76352014-05-21 08:28:48 -0700508 LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
509 result = 0;
510#endif
511 // Pop transition.
512 self->PopManagedStackFragment(fragment);
513
514 fp_result = fpr_result;
515 EXPECT_EQ(0U, fp_result);
516
517 return result;
518 }
519
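  // Looks up the quick entrypoint in the Thread's entrypoint table: GetThreadOffset<> yields the
  // slot offset for the given pointer size, and the stored value is the address of the assembly
  // stub, which can then be passed to Invoke3().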
  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
    int32_t offset;
#ifdef __LP64__
    offset = GetThreadOffset<8>(entrypoint).Int32Value();
#else
    offset = GetThreadOffset<4>(entrypoint).Int32Value();
#endif
    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
  }

 protected:
  size_t fp_result;
};


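// Exercises the kQuickMemcpy stub: copies ten 32-bit words into the middle of a 20-word buffer
// and checks that only the targeted region changed.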
TEST_F(StubTest, Memcpy) {
#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__)) || defined(__mips__)
  Thread* self = Thread::Current();

  uint32_t orig[20];
  uint32_t trg[20];
  for (size_t i = 0; i < 20; ++i) {
    orig[i] = i;
    trg[i] = 0;
  }

  Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
          10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);

  EXPECT_EQ(orig[0], trg[0]);

  for (size_t i = 1; i < 4; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  for (size_t i = 4; i < 14; ++i) {
    EXPECT_EQ(orig[i], trg[i]);
  }

  for (size_t i = 14; i < 20; ++i) {
    EXPECT_NE(orig[i], trg[i]);
  }

  // TODO: Test overlapping?

#else
  LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

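// Exercises the kQuickLockObject stub: an unlocked object becomes thin-locked, repeated locking
// increments the thin-lock count, and an object whose lock word already holds an identity
// hashcode is inflated to a fat lock.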
TEST_F(StubTest, LockObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();

  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
  EXPECT_EQ(lock_after.ThinLockCount(), 0U);  // Thin lock starts count at zero

  for (size_t i = 1; i < kThinLockLoops; ++i) {
    Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

    // Check we're at lock count i

    LockWord l_inc = obj->GetLockWord(false);
    LockWord::LockState l_inc_state = l_inc.GetState();
    EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
    EXPECT_EQ(l_inc.ThinLockCount(), i);
  }

  // Force a fat lock by running identity hashcode to fill up lock word.
  Handle<mirror::String> obj2(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  obj2->IdentityHashCode();

  Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after2 = obj2->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
  EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));

  // Test done.
#else
  LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


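// Small deterministic pseudo-random generator (linear-congruential style) so the lock/unlock
// stress test below is reproducible.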
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  uint32_t next() {
    val_ = val_ * 48271 % 2147483647 + 13;
    return val_;
  }

  uint32_t val_;
};


// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();

  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
  const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init
  static constexpr size_t kNumberOfLocks = 10;  // Number of objects = lock
  StackHandleScope<kNumberOfLocks + 1> hs(self);
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
  // This should be an illegal monitor state.
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after2 = obj->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);

  test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);

  LockWord lock_after3 = obj->GetLockWord(false);
  LockWord::LockState new_state3 = lock_after3.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);

  // Stress test:
  // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
  // each step.

  RandGen r(0x1234);

  constexpr size_t kIterations = 10000;  // Number of iterations
  constexpr size_t kMoveToFat = 1000;    // Chance of 1:kMoveToFat to make a lock fat.

  size_t counts[kNumberOfLocks];
  bool fat[kNumberOfLocks];  // Whether a lock should be thin or fat.
  Handle<mirror::String> objects[kNumberOfLocks];

  // Initialize = allocate.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    counts[i] = 0;
    fat[i] = false;
    objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
  }

  for (size_t i = 0; i < kIterations; ++i) {
    // Select which lock to update.
    size_t index = r.next() % kNumberOfLocks;

    // Make lock fat?
    if (!fat[index] && (r.next() % kMoveToFat == 0)) {
      fat[index] = true;
      objects[index]->IdentityHashCode();

      LockWord lock_iter = objects[index]->GetLockWord(false);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (counts[index] == 0) {
        EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
      } else {
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
      }
    } else {
      bool take_lock;  // Whether to lock or unlock in this step.
      if (counts[index] == 0) {
        take_lock = true;
      } else if (counts[index] == kThinLockLoops) {
        take_lock = false;
      } else {
        // Randomly.
        take_lock = r.next() % 2 == 0;
      }

      if (take_lock) {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
                      self);
        counts[index]++;
      } else {
        test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
                      art_quick_unlock_object, self);
        counts[index]--;
      }

      EXPECT_FALSE(self->IsExceptionPending());

      // Check the new state.
      LockWord lock_iter = objects[index]->GetLockWord(true);
      LockWord::LockState iter_state = lock_iter.GetState();
      if (fat[index]) {
        // Abuse MonitorInfo.
        EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
        MonitorInfo info(objects[index].Get());
        EXPECT_EQ(counts[index], info.entry_count_) << index;
      } else {
        if (counts[index] > 0) {
          EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
          EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
        } else {
          EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
        }
      }
    }
  }

  // Unlock the remaining count times and then check it's unlocked. Then deallocate.
  // Go reverse order to correctly handle Handles.
  for (size_t i = 0; i < kNumberOfLocks; ++i) {
    size_t index = kNumberOfLocks - 1 - i;
    size_t count = counts[index];
    while (count > 0) {
      test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
                    self);
      count--;
    }

    LockWord lock_after4 = objects[index]->GetLockWord(false);
    LockWord::LockState new_state4 = lock_after4.GetState();
    EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
                || LockWord::LockState::kFatLocked == new_state4);
  }

  // Test done.
#else
  UNUSED(test);
  LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

TEST_F(StubTest, UnlockObject) {
  // This will lead to monitor error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  TestUnlockObject(this);
}

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_check_cast(void);
#endif

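// Drives the kQuickCheckCast stub with compatible and incompatible class pairs and verifies
// whether an exception is raised.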
TEST_F(StubTest, CheckCast) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  const uintptr_t art_quick_check_cast = StubTest::GetEntrypoint(self, kQuickCheckCast);

  // Find some classes.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
  Handle<mirror::Class> c2(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  // TODO: Make the following work. But that would require correct managed frames.

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

#else
  LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


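// Exercises the aput-object stub with null and bounds checks: storing a String or null into a
// String[] succeeds, while out-of-range indices or an incompatible element type must throw.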
TEST_F(StubTest, APutObj) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  // Do not check non-checked ones, we'd need handlers and stuff...
  const uintptr_t art_quick_aput_obj_with_null_and_bound_check =
      StubTest::GetEntrypoint(self, kQuickAputObjectWithNullAndBoundCheck);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<5> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
  Handle<mirror::Class> ca(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  // Build a string array of size 10
  Handle<mirror::ObjectArray<mirror::Object>> array(
      hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));

  // Build a string -> should be assignable
  Handle<mirror::String> str_obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  // Build a generic object -> should fail assigning
  Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));

  // Play with it...

  // 1) Success cases
  // 1.1) Assign str_obj to array[0..3]

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(3));

  // 1.2) Assign null to array[0..3]

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(3));

  // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.

  // 2) Failure cases (str into str[])
  // 2.1) Array = null
  // TODO: Throwing NPE needs actual DEX code

//  Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
//          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
//
//  EXPECT_TRUE(self->IsExceptionPending());
//  self->ClearException();

  // 2.2) Index < 0

  Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
          reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 2.3) Index > 0

  Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 3) Failure cases (obj into str[])

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // Tests done.
#else
  LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

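// Covers the object-allocation entrypoints (unresolved type index, resolved class, initialized
// class) and an out-of-memory path after deliberately filling the heap.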
TEST_F(StubTest, AllocObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            // arbitrary
                            reinterpret_cast<size_t>(c->GetVirtualMethod(0, sizeof(void*))),
                            0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObject),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K
    size_t length = 128 * KB / 4;
    while (length > 10) {
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h.Get() == nullptr) {
        self->ClearException();

        // Try a smaller length
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h.Get() != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}

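// Same idea for array allocation: the resolved-array entrypoint with a small length, then an
// absurdly large length to provoke an OOM exception.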
TEST_F(StubTest, AllocObjectArray) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(self);
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

  // Needed to have a linked method.
  Handle<mirror::Class> c_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());

  // For some reason this does not work, as the type_idx is artificial and outside what the
  // resolved types of c_obj allow...

  if ((false)) {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            10U,
                            // arbitrary
                            reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0, sizeof(void*))),
                            StubTest::GetEntrypoint(self, kQuickAllocArray),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    EXPECT_EQ(obj->GetLength(), 10);
  }

  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 10U,
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory.
  {
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()),
                            GB,  // that should fail...
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}


Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001197TEST_F(StubTest, StringCompareTo) {
Goran Jakovljevic801fcc42015-12-03 11:44:26 +01001198#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || \
1199 (defined(__mips__) && defined(__LP64__)) || (defined(__x86_64__) && !defined(__APPLE__))
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001200 // TODO: Check the "Unresolved" allocation stubs
1201
1202 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -07001203
1204 const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);
1205
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001206 ScopedObjectAccess soa(self);
1207 // garbage is created during ClassLinker::Init
1208
1209 // Create some strings
1210 // Use array so we can index into it and use a matrix for expected results
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001211 // Setup: The first half is standard. The second half uses a non-zero offset.
1212 // TODO: Shared backing arrays.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001213 const char* c[] = { "", "", "a", "aa", "ab",
Serban Constantinescu86797a72014-06-19 16:17:56 +01001214 "aacaacaacaacaacaac", // This one's under the default limit to go to __memcmp16.
1215 "aacaacaacaacaacaacaacaacaacaacaacaac", // This one's over.
1216 "aacaacaacaacaacaacaacaacaacaacaacaaca" }; // As is this one. We need a separate one to
1217 // defeat object-equal optimizations.
Jeff Hao848f70a2014-01-15 13:49:50 -08001218 static constexpr size_t kStringCount = arraysize(c);
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001219
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001220 StackHandleScope<kStringCount> hs(self);
1221 Handle<mirror::String> s[kStringCount];
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001222
Jeff Hao848f70a2014-01-15 13:49:50 -08001223 for (size_t i = 0; i < kStringCount; ++i) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001224 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001225 }
1226
1227 // TODO: wide characters
1228
1229 // Matrix of expectations. First component is first parameter. Note we only check against the
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001230 // sign, not the value. As we are testing random offsets, we need to compute this and need to
1231 // rely on String::CompareTo being correct.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001232 int32_t expected[kStringCount][kStringCount];
1233 for (size_t x = 0; x < kStringCount; ++x) {
1234 for (size_t y = 0; y < kStringCount; ++y) {
1235 expected[x][y] = s[x]->CompareTo(s[y].Get());
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001236 }
1237 }
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001238
1239 // Play with it...
1240
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001241 for (size_t x = 0; x < kStringCount; ++x) {
1242 for (size_t y = 0; y < kStringCount; ++y) {
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001243 // Test string_compareto x y
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001244 size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
1245 reinterpret_cast<size_t>(s[y].Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001246 art_quick_string_compareto, self);
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001247
1248 EXPECT_FALSE(self->IsExceptionPending());
1249
1250 // The result is a 32b signed integer
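      // returned in a native word; the union below reinterprets the raw size_t as an int32_t.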
1251 union {
1252 size_t r;
1253 int32_t i;
1254 } conv;
1255 conv.r = result;
1256 int32_t e = expected[x][y];
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001257 EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1258 conv.r;
1259 EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1260 conv.r;
1261 EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1262 conv.r;
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001263 }
1264 }
1265
Andreas Gampe7177d7c2014-05-02 12:10:02 -07001266 // TODO: Deallocate things.
1267
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001268 // Tests done.
1269#else
1270 LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
1271 // Force-print to std::cout so it's also outside the logcat.
1272 std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
1273 std::endl;
1274#endif
1275}
1276
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001277
Mathieu Chartierc7853442015-03-27 14:35:38 -07001278static void GetSetBooleanStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001279 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001280 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001281#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1282 (defined(__x86_64__) && !defined(__APPLE__))
Fred Shih37f05ef2014-07-16 18:38:08 -07001283 constexpr size_t num_values = 5;
1284 uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };
1285
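  // Round-trip each value: write it through the Set8Static stub, then read it back through the
  // GetBooleanStatic stub and compare the full byte.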
1286 for (size_t i = 0; i < num_values; ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001287 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001288 static_cast<size_t>(values[i]),
1289 0U,
1290 StubTest::GetEntrypoint(self, kQuickSet8Static),
1291 self,
1292 referrer);
1293
Mathieu Chartierc7853442015-03-27 14:35:38 -07001294 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001295 0U, 0U,
1296 StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
1297 self,
1298 referrer);
 1299    // Booleans are currently stored as uint8_t; be zealous about asserting correct writes/gets.
1300 EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
1301 }
1302#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001303 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001304 LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
1305 // Force-print to std::cout so it's also outside the logcat.
1306 std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1307#endif
1308}
Mathieu Chartiere401d142015-04-22 13:56:20 -07001309static void GetSetByteStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Mathieu Chartierc7853442015-03-27 14:35:38 -07001310 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001311 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001312#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1313 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001314 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001315
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001316 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001317 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001318 static_cast<size_t>(values[i]),
1319 0U,
1320 StubTest::GetEntrypoint(self, kQuickSet8Static),
1321 self,
1322 referrer);
1323
Mathieu Chartierc7853442015-03-27 14:35:38 -07001324 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001325 0U, 0U,
1326 StubTest::GetEntrypoint(self, kQuickGetByteStatic),
1327 self,
1328 referrer);
1329 EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
1330 }
1331#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001332 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001333 LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
1334 // Force-print to std::cout so it's also outside the logcat.
1335 std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1336#endif
1337}
1338
1339
Mathieu Chartierc7853442015-03-27 14:35:38 -07001340static void GetSetBooleanInstance(Handle<mirror::Object>* obj, ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001341 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001342 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001343#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1344 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001345 uint8_t values[] = { 0, true, 2, 128, 0xFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001346
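  // Instance variant: the stub writes into obj; the value is verified via ArtField::GetBoolean,
  // written back with ArtField::SetBoolean, and finally read through the GetBooleanInstance stub.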
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001347 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001348 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001349 reinterpret_cast<size_t>(obj->Get()),
1350 static_cast<size_t>(values[i]),
1351 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1352 self,
1353 referrer);
1354
Mathieu Chartierc7853442015-03-27 14:35:38 -07001355 uint8_t res = f->GetBoolean(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001356 EXPECT_EQ(values[i], res) << "Iteration " << i;
1357
Mathieu Chartierc7853442015-03-27 14:35:38 -07001358 f->SetBoolean<false>(obj->Get(), res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001359
Mathieu Chartierc7853442015-03-27 14:35:38 -07001360 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001361 reinterpret_cast<size_t>(obj->Get()),
1362 0U,
1363 StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
1364 self,
1365 referrer);
1366 EXPECT_EQ(res, static_cast<uint8_t>(res2));
1367 }
1368#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001369 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001370 LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
1371 // Force-print to std::cout so it's also outside the logcat.
1372 std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1373#endif
1374}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001375static void GetSetByteInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001376 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001377 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001378#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1379 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001380 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001381
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001382 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001383 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001384 reinterpret_cast<size_t>(obj->Get()),
1385 static_cast<size_t>(values[i]),
1386 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1387 self,
1388 referrer);
1389
Mathieu Chartierc7853442015-03-27 14:35:38 -07001390 int8_t res = f->GetByte(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001391 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001392 f->SetByte<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001393
Mathieu Chartierc7853442015-03-27 14:35:38 -07001394 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001395 reinterpret_cast<size_t>(obj->Get()),
1396 0U,
1397 StubTest::GetEntrypoint(self, kQuickGetByteInstance),
1398 self,
1399 referrer);
1400 EXPECT_EQ(res, static_cast<int8_t>(res2));
1401 }
1402#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001403 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001404 LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
1405 // Force-print to std::cout so it's also outside the logcat.
1406 std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1407#endif
1408}
1409
Mathieu Chartiere401d142015-04-22 13:56:20 -07001410static void GetSetCharStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001411 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001412 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001413#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1414 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001415 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001416
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001417 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001418 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001419 static_cast<size_t>(values[i]),
1420 0U,
1421 StubTest::GetEntrypoint(self, kQuickSet16Static),
1422 self,
1423 referrer);
1424
Mathieu Chartierc7853442015-03-27 14:35:38 -07001425 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001426 0U, 0U,
1427 StubTest::GetEntrypoint(self, kQuickGetCharStatic),
1428 self,
1429 referrer);
1430
1431 EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
1432 }
1433#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001434 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001435 LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
1436 // Force-print to std::cout so it's also outside the logcat.
1437 std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1438#endif
1439}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001440static void GetSetShortStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001441 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001442 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001443#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1444 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001445 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001446
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001447 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001448 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001449 static_cast<size_t>(values[i]),
1450 0U,
1451 StubTest::GetEntrypoint(self, kQuickSet16Static),
1452 self,
1453 referrer);
1454
Mathieu Chartierc7853442015-03-27 14:35:38 -07001455 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001456 0U, 0U,
1457 StubTest::GetEntrypoint(self, kQuickGetShortStatic),
1458 self,
1459 referrer);
1460
1461 EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
1462 }
1463#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001464 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001465 LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
1466 // Force-print to std::cout so it's also outside the logcat.
1467 std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1468#endif
1469}
1470
Mathieu Chartierc7853442015-03-27 14:35:38 -07001471static void GetSetCharInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001472 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001473 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001474#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1475 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001476 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001477
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001478 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001479 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001480 reinterpret_cast<size_t>(obj->Get()),
1481 static_cast<size_t>(values[i]),
1482 StubTest::GetEntrypoint(self, kQuickSet16Instance),
1483 self,
1484 referrer);
1485
Mathieu Chartierc7853442015-03-27 14:35:38 -07001486 uint16_t res = f->GetChar(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001487 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001488 f->SetChar<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001489
Mathieu Chartierc7853442015-03-27 14:35:38 -07001490 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001491 reinterpret_cast<size_t>(obj->Get()),
1492 0U,
1493 StubTest::GetEntrypoint(self, kQuickGetCharInstance),
1494 self,
1495 referrer);
1496 EXPECT_EQ(res, static_cast<uint16_t>(res2));
1497 }
1498#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001499 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001500 LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
1501 // Force-print to std::cout so it's also outside the logcat.
1502 std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1503#endif
1504}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001505static void GetSetShortInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001506 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001507 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001508#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1509 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001510 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001511
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001512 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001513 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001514 reinterpret_cast<size_t>(obj->Get()),
1515 static_cast<size_t>(values[i]),
1516 StubTest::GetEntrypoint(self, kQuickSet16Instance),
1517 self,
1518 referrer);
1519
Mathieu Chartierc7853442015-03-27 14:35:38 -07001520 int16_t res = f->GetShort(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001521 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001522 f->SetShort<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001523
Mathieu Chartierc7853442015-03-27 14:35:38 -07001524 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001525 reinterpret_cast<size_t>(obj->Get()),
1526 0U,
1527 StubTest::GetEntrypoint(self, kQuickGetShortInstance),
1528 self,
1529 referrer);
1530 EXPECT_EQ(res, static_cast<int16_t>(res2));
1531 }
1532#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001533 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001534 LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
1535 // Force-print to std::cout so it's also outside the logcat.
1536 std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1537#endif
1538}
1539
Mathieu Chartiere401d142015-04-22 13:56:20 -07001540static void GetSet32Static(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001541 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001542 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001543#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1544 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001545 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001546
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001547 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001548 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001549 static_cast<size_t>(values[i]),
1550 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001551 StubTest::GetEntrypoint(self, kQuickSet32Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001552 self,
1553 referrer);
1554
Mathieu Chartierc7853442015-03-27 14:35:38 -07001555 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001556 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001557 StubTest::GetEntrypoint(self, kQuickGet32Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001558 self,
1559 referrer);
1560
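    // On MIPS64 only the low 32 bits are compared, presumably because 32-bit values are kept
    // sign-extended in 64-bit registers.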
Goran Jakovljevic04568812015-04-23 15:27:23 +02001561#if defined(__mips__) && defined(__LP64__)
1562 EXPECT_EQ(static_cast<uint32_t>(res), values[i]) << "Iteration " << i;
1563#else
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001564 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Goran Jakovljevic04568812015-04-23 15:27:23 +02001565#endif
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001566 }
1567#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001568 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001569 LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
1570 // Force-print to std::cout so it's also outside the logcat.
1571 std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
1572#endif
1573}
1574
1575
Mathieu Chartierc7853442015-03-27 14:35:38 -07001576static void GetSet32Instance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001577 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001578 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001579#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1580 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001581 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001582
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001583 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001584 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001585 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001586 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001587 StubTest::GetEntrypoint(self, kQuickSet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001588 self,
1589 referrer);
1590
Mathieu Chartierc7853442015-03-27 14:35:38 -07001591 int32_t res = f->GetInt(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001592 EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;
1593
1594 res++;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001595 f->SetInt<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001596
Mathieu Chartierc7853442015-03-27 14:35:38 -07001597 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001598 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001599 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001600 StubTest::GetEntrypoint(self, kQuickGet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001601 self,
1602 referrer);
1603 EXPECT_EQ(res, static_cast<int32_t>(res2));
1604 }
1605#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001606 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001607 LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
1608 // Force-print to std::cout so it's also outside the logcat.
1609 std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1610#endif
1611}
1612
1613
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001614#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1615 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001616
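// Helper: stores 'val' into a static object field through the SetObjStatic stub and checks that
// the GetObjStatic stub returns the same reference.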
1617static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001618 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001619 SHARED_REQUIRES(Locks::mutator_lock_) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001620 test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1621 reinterpret_cast<size_t>(val),
1622 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001623 StubTest::GetEntrypoint(self, kQuickSetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001624 self,
1625 referrer);
1626
1627 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1628 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001629 StubTest::GetEntrypoint(self, kQuickGetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001630 self,
1631 referrer);
1632
1633 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1634}
1635#endif
1636
Mathieu Chartiere401d142015-04-22 13:56:20 -07001637static void GetSetObjStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001638 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001639 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001640#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1641 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierc7853442015-03-27 14:35:38 -07001642 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001643
1644 // Allocate a string object for simplicity.
1645 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartierc7853442015-03-27 14:35:38 -07001646 set_and_check_static(f->GetDexFieldIndex(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001647
Mathieu Chartierc7853442015-03-27 14:35:38 -07001648 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001649#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001650 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001651 LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
1652 // Force-print to std::cout so it's also outside the logcat.
1653 std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
1654#endif
1655}
1656
1657
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001658#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1659 (defined(__x86_64__) && !defined(__APPLE__))
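// Helper: stores 'val' into an instance object field of 'trg' through the SetObjInstance stub and
// checks that both the GetObjInstance stub and ArtField::GetObj observe it.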
Mathieu Chartierc7853442015-03-27 14:35:38 -07001660static void set_and_check_instance(ArtField* f, mirror::Object* trg,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001661 mirror::Object* val, Thread* self, ArtMethod* referrer,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001662 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001663 SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001664 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001665 reinterpret_cast<size_t>(trg),
1666 reinterpret_cast<size_t>(val),
Andreas Gampe29b38412014-08-13 00:15:43 -07001667 StubTest::GetEntrypoint(self, kQuickSetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001668 self,
1669 referrer);
1670
Mathieu Chartierc7853442015-03-27 14:35:38 -07001671 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001672 reinterpret_cast<size_t>(trg),
1673 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001674 StubTest::GetEntrypoint(self, kQuickGetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001675 self,
1676 referrer);
1677
1678 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1679
Mathieu Chartierc7853442015-03-27 14:35:38 -07001680 EXPECT_EQ(val, f->GetObj(trg));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001681}
1682#endif
1683
Mathieu Chartierc7853442015-03-27 14:35:38 -07001684static void GetSetObjInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001685 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001686 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001687#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1688 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001689 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001690
1691 // Allocate a string object for simplicity.
1692 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001693 set_and_check_instance(f, obj->Get(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001694
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001695 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001696#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001697 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001698 LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
1699 // Force-print to std::cout so it's also outside the logcat.
1700 std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
1701#endif
1702}
1703
1704
Calin Juravle872ab3f2015-10-02 07:27:51 +01001705// TODO: Complete these tests for 32b architectures
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001706
Mathieu Chartiere401d142015-04-22 13:56:20 -07001707static void GetSet64Static(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001708 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001709 SHARED_REQUIRES(Locks::mutator_lock_) {
Calin Juravle6e399ac2015-10-02 23:56:06 +01001710#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) \
1711 || defined(__aarch64__)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001712 uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001713
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001714 for (size_t i = 0; i < arraysize(values); ++i) {
Calin Juravle6e399ac2015-10-02 23:56:06 +01001715 // 64 bit FieldSet stores the set value in the second register.
1716 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Calin Juravle24cc1b32015-10-06 11:46:58 +01001717 0U,
1718 values[i],
1719 StubTest::GetEntrypoint(self, kQuickSet64Static),
1720 self,
1721 referrer);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001722
Mathieu Chartierc7853442015-03-27 14:35:38 -07001723 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001724 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001725 StubTest::GetEntrypoint(self, kQuickGet64Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001726 self,
1727 referrer);
1728
1729 EXPECT_EQ(res, values[i]) << "Iteration " << i;
1730 }
1731#else
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001732 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001733 LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
1734 // Force-print to std::cout so it's also outside the logcat.
1735 std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
1736#endif
1737}
1738
1739
Mathieu Chartierc7853442015-03-27 14:35:38 -07001740static void GetSet64Instance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001741 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001742 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001743#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) || \
1744 defined(__aarch64__)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001745 uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001746
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001747 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001748 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001749 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001750 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001751 StubTest::GetEntrypoint(self, kQuickSet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001752 self,
1753 referrer);
1754
Mathieu Chartierc7853442015-03-27 14:35:38 -07001755 int64_t res = f->GetLong(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001756 EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;
1757
1758 res++;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001759 f->SetLong<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001760
Mathieu Chartierc7853442015-03-27 14:35:38 -07001761 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001762 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001763 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001764 StubTest::GetEntrypoint(self, kQuickGet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001765 self,
1766 referrer);
1767 EXPECT_EQ(res, static_cast<int64_t>(res2));
1768 }
1769#else
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001770 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001771 LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
1772 // Force-print to std::cout so it's also outside the logcat.
1773 std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1774#endif
1775}
1776
1777static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
1778 // garbage is created during ClassLinker::Init
1779
1780 JNIEnv* env = Thread::Current()->GetJniEnv();
1781 jclass jc = env->FindClass("AllFields");
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001782 CHECK(jc != nullptr);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001783 jobject o = env->AllocObject(jc);
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001784 CHECK(o != nullptr);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001785
1786 ScopedObjectAccess soa(self);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001787 StackHandleScope<3> hs(self);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001788 Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
1789 Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001790 // Need a method as a referrer
Mathieu Chartiere401d142015-04-22 13:56:20 -07001791 ArtMethod* m = c->GetDirectMethod(0, sizeof(void*));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001792
1793 // Play with it...
1794
1795 // Static fields.
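  // Iterate the static fields of AllFields and exercise only those matching test_type; the switch
  // dispatches to the per-type helper above.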
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001796 for (ArtField& f : c->GetSFields()) {
1797 Primitive::Type type = f.GetTypeAsPrimitiveType();
Mathieu Chartierc7853442015-03-27 14:35:38 -07001798 if (test_type != type) {
1799 continue;
1800 }
1801 switch (type) {
1802 case Primitive::Type::kPrimBoolean:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001803 GetSetBooleanStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001804 break;
1805 case Primitive::Type::kPrimByte:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001806 GetSetByteStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001807 break;
1808 case Primitive::Type::kPrimChar:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001809 GetSetCharStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001810 break;
1811 case Primitive::Type::kPrimShort:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001812 GetSetShortStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001813 break;
1814 case Primitive::Type::kPrimInt:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001815 GetSet32Static(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001816 break;
1817 case Primitive::Type::kPrimLong:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001818 GetSet64Static(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001819 break;
1820 case Primitive::Type::kPrimNot:
1821 // Don't try array.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001822 if (f.GetTypeDescriptor()[0] != '[') {
1823 GetSetObjStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001824 }
1825 break;
1826 default:
1827 break; // Skip.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001828 }
1829 }
1830
1831 // Instance fields.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001832 for (ArtField& f : c->GetIFields()) {
1833 Primitive::Type type = f.GetTypeAsPrimitiveType();
Mathieu Chartierc7853442015-03-27 14:35:38 -07001834 if (test_type != type) {
1835 continue;
1836 }
1837 switch (type) {
1838 case Primitive::Type::kPrimBoolean:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001839 GetSetBooleanInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001840 break;
1841 case Primitive::Type::kPrimByte:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001842 GetSetByteInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001843 break;
1844 case Primitive::Type::kPrimChar:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001845 GetSetCharInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001846 break;
1847 case Primitive::Type::kPrimShort:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001848 GetSetShortInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001849 break;
1850 case Primitive::Type::kPrimInt:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001851 GetSet32Instance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001852 break;
1853 case Primitive::Type::kPrimLong:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001854 GetSet64Instance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001855 break;
1856 case Primitive::Type::kPrimNot:
1857 // Don't try array.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001858 if (f.GetTypeDescriptor()[0] != '[') {
1859 GetSetObjInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001860 }
1861 break;
1862 default:
1863 break; // Skip.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001864 }
1865 }
1866
1867 // TODO: Deallocate things.
1868}
1869
Fred Shih37f05ef2014-07-16 18:38:08 -07001870TEST_F(StubTest, Fields8) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001871 Thread* self = Thread::Current();
1872
1873 self->TransitionFromSuspendedToRunnable();
1874 LoadDex("AllFields");
1875 bool started = runtime_->Start();
1876 CHECK(started);
1877
1878 TestFields(self, this, Primitive::Type::kPrimBoolean);
1879 TestFields(self, this, Primitive::Type::kPrimByte);
1880}
1881
1882TEST_F(StubTest, Fields16) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001883 Thread* self = Thread::Current();
1884
1885 self->TransitionFromSuspendedToRunnable();
1886 LoadDex("AllFields");
1887 bool started = runtime_->Start();
1888 CHECK(started);
1889
1890 TestFields(self, this, Primitive::Type::kPrimChar);
1891 TestFields(self, this, Primitive::Type::kPrimShort);
1892}
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001893
1894TEST_F(StubTest, Fields32) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001895 Thread* self = Thread::Current();
1896
1897 self->TransitionFromSuspendedToRunnable();
1898 LoadDex("AllFields");
1899 bool started = runtime_->Start();
1900 CHECK(started);
1901
1902 TestFields(self, this, Primitive::Type::kPrimInt);
1903}
1904
1905TEST_F(StubTest, FieldsObj) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001906 Thread* self = Thread::Current();
1907
1908 self->TransitionFromSuspendedToRunnable();
1909 LoadDex("AllFields");
1910 bool started = runtime_->Start();
1911 CHECK(started);
1912
1913 TestFields(self, this, Primitive::Type::kPrimNot);
1914}
1915
1916TEST_F(StubTest, Fields64) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001917 Thread* self = Thread::Current();
1918
1919 self->TransitionFromSuspendedToRunnable();
1920 LoadDex("AllFields");
1921 bool started = runtime_->Start();
1922 CHECK(started);
1923
1924 TestFields(self, this, Primitive::Type::kPrimLong);
1925}
1926
Andreas Gampe51f76352014-05-21 08:28:48 -07001927TEST_F(StubTest, IMT) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001928#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1929 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe51f76352014-05-21 08:28:48 -07001930 Thread* self = Thread::Current();
1931
1932 ScopedObjectAccess soa(self);
1933 StackHandleScope<7> hs(self);
1934
1935 JNIEnv* env = Thread::Current()->GetJniEnv();
1936
1937 // ArrayList
1938
1939 // Load ArrayList and used methods (JNI).
1940 jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
1941 ASSERT_NE(nullptr, arraylist_jclass);
1942 jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
1943 ASSERT_NE(nullptr, arraylist_constructor);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001944 jmethodID contains_jmethod = env->GetMethodID(
1945 arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
Andreas Gampe51f76352014-05-21 08:28:48 -07001946 ASSERT_NE(nullptr, contains_jmethod);
1947 jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
1948 ASSERT_NE(nullptr, add_jmethod);
1949
Mathieu Chartiere401d142015-04-22 13:56:20 -07001950 // Get representation.
1951 ArtMethod* contains_amethod = soa.DecodeMethod(contains_jmethod);
Andreas Gampe51f76352014-05-21 08:28:48 -07001952
1953 // Patch up ArrayList.contains.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001954 if (contains_amethod->GetEntryPointFromQuickCompiledCode() == nullptr) {
1955 contains_amethod->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
Andreas Gampe29b38412014-08-13 00:15:43 -07001956 StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
Andreas Gampe51f76352014-05-21 08:28:48 -07001957 }
1958
1959 // List
1960
1961 // Load List and used methods (JNI).
1962 jclass list_jclass = env->FindClass("java/util/List");
1963 ASSERT_NE(nullptr, list_jclass);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001964 jmethodID inf_contains_jmethod = env->GetMethodID(
1965 list_jclass, "contains", "(Ljava/lang/Object;)Z");
Andreas Gampe51f76352014-05-21 08:28:48 -07001966 ASSERT_NE(nullptr, inf_contains_jmethod);
1967
1968 // Get mirror representation.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001969 ArtMethod* inf_contains = soa.DecodeMethod(inf_contains_jmethod);
Andreas Gampe51f76352014-05-21 08:28:48 -07001970
1971 // Object
1972
1973 jclass obj_jclass = env->FindClass("java/lang/Object");
1974 ASSERT_NE(nullptr, obj_jclass);
1975 jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
1976 ASSERT_NE(nullptr, obj_constructor);
1977
Andreas Gampe51f76352014-05-21 08:28:48 -07001978 // Create instances.
1979
1980 jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
1981 ASSERT_NE(nullptr, jarray_list);
1982 Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object*>(jarray_list)));
1983
1984 jobject jobj = env->NewObject(obj_jclass, obj_constructor);
1985 ASSERT_NE(nullptr, jobj);
1986 Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(jobj)));
1987
Andreas Gampe1a7e2922014-05-21 15:37:53 -07001988 // Invocation tests.
1989
1990 // 1. imt_conflict
1991
1992 // Contains.
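  // Call the IMT conflict trampoline directly; the interface method's dex method index is passed
  // as the hidden argument so the trampoline can locate the concrete ArrayList.contains.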
Andreas Gampe51f76352014-05-21 08:28:48 -07001993
1994 size_t result =
1995 Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
1996 reinterpret_cast<size_t>(obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -07001997 StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
Mathieu Chartiere401d142015-04-22 13:56:20 -07001998 self, contains_amethod,
1999 static_cast<size_t>(inf_contains->GetDexMethodIndex()));
Andreas Gampe51f76352014-05-21 08:28:48 -07002000
2001 ASSERT_FALSE(self->IsExceptionPending());
2002 EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
2003
2004 // Add object.
2005
2006 env->CallBooleanMethod(jarray_list, add_jmethod, jobj);
2007
Nicolas Geoffray14691c52015-03-05 10:40:17 +00002008 ASSERT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());
Andreas Gampe51f76352014-05-21 08:28:48 -07002009
Andreas Gampe1a7e2922014-05-21 15:37:53 -07002010 // Contains.
Andreas Gampe51f76352014-05-21 08:28:48 -07002011
Mathieu Chartiere401d142015-04-22 13:56:20 -07002012 result = Invoke3WithReferrerAndHidden(
2013 0U, reinterpret_cast<size_t>(array_list.Get()), reinterpret_cast<size_t>(obj.Get()),
2014 StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline), self, contains_amethod,
2015 static_cast<size_t>(inf_contains->GetDexMethodIndex()));
Andreas Gampe51f76352014-05-21 08:28:48 -07002016
2017 ASSERT_FALSE(self->IsExceptionPending());
2018 EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);
Andreas Gampe1a7e2922014-05-21 15:37:53 -07002019
2020 // 2. regular interface trampoline
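  // Here the interface method index is passed as the first (normal) argument to
  // InvokeInterfaceTrampolineWithAccessCheck rather than as a hidden one.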
2021
Mathieu Chartiere401d142015-04-22 13:56:20 -07002022 result = Invoke3WithReferrer(static_cast<size_t>(inf_contains->GetDexMethodIndex()),
Andreas Gampe1a7e2922014-05-21 15:37:53 -07002023 reinterpret_cast<size_t>(array_list.Get()),
2024 reinterpret_cast<size_t>(obj.Get()),
2025 StubTest::GetEntrypoint(self,
2026 kQuickInvokeInterfaceTrampolineWithAccessCheck),
Mathieu Chartiere401d142015-04-22 13:56:20 -07002027 self, contains_amethod);
Andreas Gampe1a7e2922014-05-21 15:37:53 -07002028
2029 ASSERT_FALSE(self->IsExceptionPending());
2030 EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);
2031
Mathieu Chartiere401d142015-04-22 13:56:20 -07002032 result = Invoke3WithReferrer(
2033 static_cast<size_t>(inf_contains->GetDexMethodIndex()),
2034 reinterpret_cast<size_t>(array_list.Get()), reinterpret_cast<size_t>(array_list.Get()),
2035 StubTest::GetEntrypoint(self, kQuickInvokeInterfaceTrampolineWithAccessCheck), self,
2036 contains_amethod);
Andreas Gampe1a7e2922014-05-21 15:37:53 -07002037
2038 ASSERT_FALSE(self->IsExceptionPending());
2039 EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
Andreas Gampe51f76352014-05-21 08:28:48 -07002040#else
Andreas Gampe6aac3552014-06-09 14:55:53 -07002041 LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
Andreas Gampe51f76352014-05-21 08:28:48 -07002042 // Force-print to std::cout so it's also outside the logcat.
Andreas Gampe6aac3552014-06-09 14:55:53 -07002043 std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
2044#endif
2045}
2046
Andreas Gampe6aac3552014-06-09 14:55:53 -07002047TEST_F(StubTest, StringIndexOf) {
Goran Jakovljevic801fcc42015-12-03 11:44:26 +01002048#if defined(__arm__) || defined(__aarch64__) || (defined(__mips__) && defined(__LP64__))
Andreas Gampe6aac3552014-06-09 14:55:53 -07002049 Thread* self = Thread::Current();
2050 ScopedObjectAccess soa(self);
2051 // garbage is created during ClassLinker::Init
2052
2053 // Create some strings
2054 // Use array so we can index into it and use a matrix for expected results
2055 // Setup: The first half is standard. The second half uses a non-zero offset.
2056 // TODO: Shared backing arrays.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002057 const char* c_str[] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
2058 static constexpr size_t kStringCount = arraysize(c_str);
2059 const char c_char[] = { 'a', 'b', 'c', 'd', 'e' };
2060 static constexpr size_t kCharCount = arraysize(c_char);
Andreas Gampe6aac3552014-06-09 14:55:53 -07002061
2062 StackHandleScope<kStringCount> hs(self);
2063 Handle<mirror::String> s[kStringCount];
2064
2065 for (size_t i = 0; i < kStringCount; ++i) {
2066 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
2067 }
2068
 2069  // Matrix of expectations. First component is the string, second the character, third the
 2070  // start index. The values are computed up front with String::FastIndexOf, so this test relies
 2071  // on FastIndexOf being correct and checks the stub's result for exact equality.
2072 static constexpr size_t kMaxLen = 9;
2073 DCHECK_LE(strlen(c_str[kStringCount-1]), kMaxLen) << "Please fix the indexof test.";
2074
2075 // Last dimension: start, offset by 1.
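  // Index z maps to start == z - 1, so starts from -1 through kMaxLen + 1 are covered.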
2076 int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
2077 for (size_t x = 0; x < kStringCount; ++x) {
2078 for (size_t y = 0; y < kCharCount; ++y) {
2079 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2080 expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
2081 }
2082 }
2083 }
2084
2085 // Play with it...
2086
2087 for (size_t x = 0; x < kStringCount; ++x) {
2088 for (size_t y = 0; y < kCharCount; ++y) {
2089 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2090 int32_t start = static_cast<int32_t>(z) - 1;
2091
 2092        // Test indexof: find c_char[y] in s[x] starting at 'start'.
2093 size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
Andreas Gampe29b38412014-08-13 00:15:43 -07002094 StubTest::GetEntrypoint(self, kQuickIndexOf), self);
Andreas Gampe6aac3552014-06-09 14:55:53 -07002095
2096 EXPECT_FALSE(self->IsExceptionPending());
2097
2098 // The result is a 32b signed integer
2099 union {
2100 size_t r;
2101 int32_t i;
2102 } conv;
2103 conv.r = result;
2104
2105 EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
2106 c_char[y] << " @ " << start;
2107 }
2108 }
2109 }
2110
2111 // TODO: Deallocate things.
2112
2113 // Tests done.
2114#else
2115 LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
2116 // Force-print to std::cout so it's also outside the logcat.
2117 std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe51f76352014-05-21 08:28:48 -07002118#endif
2119}
2120
Man Cao1aee9002015-07-14 22:31:42 -07002121TEST_F(StubTest, ReadBarrier) {
2122#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
2123 defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
2124 Thread* self = Thread::Current();
2125
2126 const uintptr_t readBarrierSlow = StubTest::GetEntrypoint(self, kQuickReadBarrierSlow);
2127
2128 // Create an object
2129 ScopedObjectAccess soa(self);
2130 // garbage is created during ClassLinker::Init
2131
2132 StackHandleScope<2> hs(soa.Self());
2133 Handle<mirror::Class> c(
2134 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
2135
2136 // Build an object instance
2137 Handle<mirror::Object> obj(hs.NewHandle(c->AllocObject(soa.Self())));
2138
2139 EXPECT_FALSE(self->IsExceptionPending());
2140
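  // Invoke the read barrier slow path on obj's class field (null ref, obj, class offset); it must
  // return the same class pointer that obj->GetClass() reports.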
2141 size_t result = Invoke3(0U, reinterpret_cast<size_t>(obj.Get()),
2142 mirror::Object::ClassOffset().SizeValue(), readBarrierSlow, self);
2143
2144 EXPECT_FALSE(self->IsExceptionPending());
2145 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
2146 mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
2147 EXPECT_EQ(klass, obj->GetClass());
2148
2149 // Tests done.
2150#else
2151 LOG(INFO) << "Skipping read_barrier_slow";
2152 // Force-print to std::cout so it's also outside the logcat.
2153 std::cout << "Skipping read_barrier_slow" << std::endl;
2154#endif
2155}
2156
Roland Levillain0d5a2812015-11-13 10:07:31 +00002157TEST_F(StubTest, ReadBarrierForRoot) {
2158#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
2159 defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
2160 Thread* self = Thread::Current();
2161
2162 const uintptr_t readBarrierForRootSlow =
2163 StubTest::GetEntrypoint(self, kQuickReadBarrierForRootSlow);
2164
2165 // Create an object
2166 ScopedObjectAccess soa(self);
2167 // garbage is created during ClassLinker::Init
2168
2169 StackHandleScope<1> hs(soa.Self());
2170
2171 Handle<mirror::String> obj(
2172 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
2173
2174 EXPECT_FALSE(self->IsExceptionPending());
2175
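  // The root variant takes the address of a GcRoot; reading String's class root must yield the
  // same class the string instance reports.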
2176 GcRoot<mirror::Class>& root = mirror::String::java_lang_String_;
2177 size_t result = Invoke3(reinterpret_cast<size_t>(&root), 0U, 0U, readBarrierForRootSlow, self);
2178
2179 EXPECT_FALSE(self->IsExceptionPending());
2180 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
2181 mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
2182 EXPECT_EQ(klass, obj->GetClass());
2183
2184 // Tests done.
2185#else
2186 LOG(INFO) << "Skipping read_barrier_for_root_slow";
2187 // Force-print to std::cout so it's also outside the logcat.
2188 std::cout << "Skipping read_barrier_for_root_slow" << std::endl;
2189#endif
2190}
2191
Andreas Gampe525cde22014-04-22 15:44:50 -07002192} // namespace art