blob: 09af3731d3c09bb41102141fb4fc68c2902cad85 [file] [log] [blame]
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
Ian Rogerse63db272014-07-15 15:36:11 -070017#include <cstdio>
18
Mathieu Chartierc7853442015-03-27 14:35:38 -070019#include "art_field-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070020#include "art_method-inl.h"
Vladimir Marko3481ba22015-04-13 12:22:36 +010021#include "class_linker-inl.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070022#include "common_runtime_test.h"
Andreas Gampe29b38412014-08-13 00:15:43 -070023#include "entrypoints/quick/quick_entrypoints_enum.h"
Nicolas Geoffray1004faa2016-03-23 14:28:30 +000024#include "linear_alloc.h"
Andreas Gampe51f76352014-05-21 08:28:48 -070025#include "mirror/class-inl.h"
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -070026#include "mirror/string-inl.h"
Ian Rogerse63db272014-07-15 15:36:11 -070027#include "scoped_thread_state_change.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070028
29namespace art {
30
31
// Test fixture for exercising the quick entrypoint stubs. It provides Invoke3* helpers that
// call a stub's machine code directly via per-architecture inline assembly, passing up to three
// word-sized arguments (plus an optional referrer ArtMethod* and a "hidden" argument) in the
// registers the quick ABI expects.
class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(), type);
        }
      }
    }
  }

  // Shrink -Xmx to 4M and force interpreter-only mode (-Xint) for these tests.
  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  // Invoke the stub at `code` with three word-sized arguments and no referrer.
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             ArtMethod* referrer) {
    return Invoke3WithReferrerAndHidden(arg0, arg1, arg2, code, self, referrer, 0);
  }

  // TODO: Set up a frame according to referrer's specs.
  // Core trampoline: pushes a managed-stack fragment, then uses architecture-specific inline
  // assembly to spill caller state, place args/code/self/hidden in the registers the quick ABI
  // expects, call the stub, and restore state. On aarch64 it additionally pre-garbles d8-d15
  // and verifies afterwards that the stub preserved them (result recorded in fp_result).
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
#define PUSH(reg) "push " # reg "\n\t .cfi_adjust_cfa_offset 4\n\t"
#define POP(reg) "pop " # reg "\n\t .cfi_adjust_cfa_offset -4\n\t"
    __asm__ __volatile__(
        "movd %[hidden], %%xmm7\n\t"  // This is a memory op, so do this early. If it is off of
                                      // esp, then we won't be able to access it after spilling.

        // Spill 6 registers.
        PUSH(%%ebx)
        PUSH(%%ecx)
        PUSH(%%edx)
        PUSH(%%esi)
        PUSH(%%edi)
        PUSH(%%ebp)

        // Store the inputs to the stack, but keep the referrer up top, less work.
        PUSH(%[referrer])           // Align stack.
        PUSH(%[referrer])           // Store referrer

        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[code])
        // Now read them back into the required registers.
        POP(%%edi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%eax)
        // Call is prepared now.

        "call *%%edi\n\t"           // Call the stub
        "addl $8, %%esp\n\t"        // Pop referrer and padding.
        ".cfi_adjust_cfa_offset -8\n\t"

        // Restore 6 registers.
        POP(%%ebp)
        POP(%%edi)
        POP(%%esi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%ebx)

        : "=a" (result)
          // Use the result from eax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer]"r"(referrer), [hidden]"m"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory", "xmm7");  // clobber.
#undef PUSH
#undef POP
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop null and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "r0", "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #80\n\t"
        ".cfi_adjust_cfa_offset 80\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"
        // To be extra defensive, store x20. We do this because some of the stubs might make a
        // transition into the runtime via the blr instruction below and *not* save x20.
        "str x20, [sp, #64]\n\t"
        // 8 byte buffer

        "sub sp, sp, #16\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"    // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x19, x17, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"              // Call the stub
        "mov x8, x0\n\t"          // Store result
        "add sp, sp, #16\n\t"     // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"          // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"    // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "ldr x20, [sp, #64]\n\t"
        "add sp, sp, #80\n\t"     // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -80\n\t"

        "str x9, %[fpr_result]\n\t"  // Store the FPR comparison result
        "mov %[result], x8\n\t"      // Store the call result

        "b 3f\n\t"                // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
          // Leave one register unclobbered, which is needed for compiling with
          // -fstack-protector-strong. According to AAPCS64 registers x9-x15 are caller-saved,
          // which means we should unclobber one of the callee-saved registers that are unused.
          // Here we use x20.
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");
#elif defined(__mips__) && !defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a3 and t0-t7 which we say we don't clobber. May contain args.
        "addiu $sp, $sp, -64\n\t"
        "sw $a0, 0($sp)\n\t"
        "sw $a1, 4($sp)\n\t"
        "sw $a2, 8($sp)\n\t"
        "sw $a3, 12($sp)\n\t"
        "sw $t0, 16($sp)\n\t"
        "sw $t1, 20($sp)\n\t"
        "sw $t2, 24($sp)\n\t"
        "sw $t3, 28($sp)\n\t"
        "sw $t4, 32($sp)\n\t"
        "sw $t5, 36($sp)\n\t"
        "sw $t6, 40($sp)\n\t"
        "sw $t7, 44($sp)\n\t"
        // Spill gp register since it is caller save.
        "sw $gp, 52($sp)\n\t"

        "addiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sw %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "addiu $sp, $sp, -24\n\t"
        "sw %[arg0], 0($sp)\n\t"
        "sw %[arg1], 4($sp)\n\t"
        "sw %[arg2], 8($sp)\n\t"
        "sw %[code], 12($sp)\n\t"
        "sw %[self], 16($sp)\n\t"
        "sw %[hidden], 20($sp)\n\t"

        // Load call params into the right registers.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $t9, 12($sp)\n\t"
        "lw $s1, 16($sp)\n\t"
        "lw $t0, 20($sp)\n\t"
        "addiu $sp, $sp, 24\n\t"

        "jalr $t9\n\t"             // Call the stub.
        "nop\n\t"
        "addiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $a3, 12($sp)\n\t"
        "lw $t0, 16($sp)\n\t"
        "lw $t1, 20($sp)\n\t"
        "lw $t2, 24($sp)\n\t"
        "lw $t3, 28($sp)\n\t"
        "lw $t4, 32($sp)\n\t"
        "lw $t5, 36($sp)\n\t"
        "lw $t6, 40($sp)\n\t"
        "lw $t7, 44($sp)\n\t"
        // Restore gp.
        "lw $gp, 52($sp)\n\t"
        "addiu $sp, $sp, 64\n\t"   // Free stack space, now sp as on entry.

        "move %[result], $v0\n\t"  // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "at", "v0", "v1", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "t8", "t9", "k0", "k1",
          "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__mips__) && defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a7 which we say we don't clobber. May contain args.
        "daddiu $sp, $sp, -64\n\t"
        "sd $a0, 0($sp)\n\t"
        "sd $a1, 8($sp)\n\t"
        "sd $a2, 16($sp)\n\t"
        "sd $a3, 24($sp)\n\t"
        "sd $a4, 32($sp)\n\t"
        "sd $a5, 40($sp)\n\t"
        "sd $a6, 48($sp)\n\t"
        "sd $a7, 56($sp)\n\t"

        "daddiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sd %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "daddiu $sp, $sp, -48\n\t"
        "sd %[arg0], 0($sp)\n\t"
        "sd %[arg1], 8($sp)\n\t"
        "sd %[arg2], 16($sp)\n\t"
        "sd %[code], 24($sp)\n\t"
        "sd %[self], 32($sp)\n\t"
        "sd %[hidden], 40($sp)\n\t"

        // Load call params into the right registers.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $t9, 24($sp)\n\t"
        "ld $s1, 32($sp)\n\t"
        "ld $t0, 40($sp)\n\t"
        "daddiu $sp, $sp, 48\n\t"

        "jalr $t9\n\t"              // Call the stub.
        "nop\n\t"
        "daddiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $a3, 24($sp)\n\t"
        "ld $a4, 32($sp)\n\t"
        "ld $a5, 40($sp)\n\t"
        "ld $a6, 48($sp)\n\t"
        "ld $a7, 56($sp)\n\t"
        "daddiu $sp, $sp, 64\n\t"

        "move %[result], $v0\n\t"   // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        // Instead aliases t0-t3, register names $12-$15 has been used in the clobber list because
        // t0-t3 are ambiguous.
        : "at", "v0", "v1", "$12", "$13", "$14", "$15", "s0", "s1", "s2", "s3", "s4", "s5", "s6",
          "s7", "t8", "t9", "k0", "k1", "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__)
#define PUSH(reg) "pushq " # reg "\n\t .cfi_adjust_cfa_offset 8\n\t"
#define POP(reg) "popq " # reg "\n\t .cfi_adjust_cfa_offset -8\n\t"
    // Note: Uses the native convention. We do a callee-save regimen by manually spilling and
    // restoring almost all registers.
    // TODO: Set the thread?
    __asm__ __volatile__(
        // Spill almost everything (except rax, rsp). 14 registers.
        PUSH(%%rbx)
        PUSH(%%rcx)
        PUSH(%%rdx)
        PUSH(%%rsi)
        PUSH(%%rdi)
        PUSH(%%rbp)
        PUSH(%%r8)
        PUSH(%%r9)
        PUSH(%%r10)
        PUSH(%%r11)
        PUSH(%%r12)
        PUSH(%%r13)
        PUSH(%%r14)
        PUSH(%%r15)

        PUSH(%[referrer])          // Push referrer & 16B alignment padding
        PUSH(%[referrer])

        // Now juggle the input registers.
        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[hidden])
        PUSH(%[code])
        POP(%%r8)
        POP(%%rax)
        POP(%%rdx)
        POP(%%rsi)
        POP(%%rdi)

        "call *%%r8\n\t"      // Call the stub
        "addq $16, %%rsp\n\t" // Pop null and padding
        ".cfi_adjust_cfa_offset -16\n\t"

        POP(%%r15)
        POP(%%r14)
        POP(%%r13)
        POP(%%r12)
        POP(%%r11)
        POP(%%r10)
        POP(%%r9)
        POP(%%r8)
        POP(%%rbp)
        POP(%%rdi)
        POP(%%rsi)
        POP(%%rdx)
        POP(%%rcx)
        POP(%%rbx)

        : "=a" (result)
          // Use the result from rax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into some other
          // register. We can't use "b" (rbx), as ASAN uses this for the frame pointer.
        : "memory");  // We spill and restore (almost) all registers, so only mention memory here.
#undef PUSH
#undef POP
#else
    UNUSED(arg0, arg1, arg2, code, referrer, hidden);
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    // Publish the FPR-preservation check result (only set by the aarch64 path; 0 elsewhere) and
    // fail the test immediately if a stub clobbered a callee-saved FP register.
    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Read the entrypoint pointer for `entrypoint` out of the thread's entrypoint table, using the
  // pointer-size-appropriate thread offset.
  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
    int32_t offset;
#ifdef __LP64__
    offset = GetThreadOffset<8>(entrypoint).Int32Value();
#else
    offset = GetThreadOffset<4>(entrypoint).Int32Value();
#endif
    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
  }

 protected:
  // Result of the aarch64 d8-d15 preservation check from the last Invoke3* call (0 == preserved).
  size_t fp_result;
};
543
544
Andreas Gampe525cde22014-04-22 15:44:50 -0700545TEST_F(StubTest, Memcpy) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200546#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__)) || defined(__mips__)
Andreas Gampe525cde22014-04-22 15:44:50 -0700547 Thread* self = Thread::Current();
548
549 uint32_t orig[20];
550 uint32_t trg[20];
551 for (size_t i = 0; i < 20; ++i) {
552 orig[i] = i;
553 trg[i] = 0;
554 }
555
556 Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
Andreas Gampe29b38412014-08-13 00:15:43 -0700557 10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700558
559 EXPECT_EQ(orig[0], trg[0]);
560
561 for (size_t i = 1; i < 4; ++i) {
562 EXPECT_NE(orig[i], trg[i]);
563 }
564
565 for (size_t i = 4; i < 14; ++i) {
566 EXPECT_EQ(orig[i], trg[i]);
567 }
568
569 for (size_t i = 14; i < 20; ++i) {
570 EXPECT_NE(orig[i], trg[i]);
571 }
572
573 // TODO: Test overlapping?
574
575#else
576 LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
577 // Force-print to std::cout so it's also outside the logcat.
578 std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
579#endif
580}
581
Andreas Gampe525cde22014-04-22 15:44:50 -0700582TEST_F(StubTest, LockObject) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200583#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
584 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700585 static constexpr size_t kThinLockLoops = 100;
586
Andreas Gampe525cde22014-04-22 15:44:50 -0700587 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700588
589 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
590
Andreas Gampe525cde22014-04-22 15:44:50 -0700591 // Create an object
592 ScopedObjectAccess soa(self);
593 // garbage is created during ClassLinker::Init
594
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700595 StackHandleScope<2> hs(soa.Self());
596 Handle<mirror::String> obj(
597 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700598 LockWord lock = obj->GetLockWord(false);
599 LockWord::LockState old_state = lock.GetState();
600 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
601
Andreas Gampe29b38412014-08-13 00:15:43 -0700602 Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700603
604 LockWord lock_after = obj->GetLockWord(false);
605 LockWord::LockState new_state = lock_after.GetState();
606 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700607 EXPECT_EQ(lock_after.ThinLockCount(), 0U); // Thin lock starts count at zero
608
609 for (size_t i = 1; i < kThinLockLoops; ++i) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700610 Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700611
612 // Check we're at lock count i
613
614 LockWord l_inc = obj->GetLockWord(false);
615 LockWord::LockState l_inc_state = l_inc.GetState();
616 EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
617 EXPECT_EQ(l_inc.ThinLockCount(), i);
618 }
619
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700620 // Force a fat lock by running identity hashcode to fill up lock word.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700621 Handle<mirror::String> obj2(hs.NewHandle(
622 mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700623
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700624 obj2->IdentityHashCode();
625
Andreas Gampe29b38412014-08-13 00:15:43 -0700626 Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700627
628 LockWord lock_after2 = obj2->GetLockWord(false);
629 LockWord::LockState new_state2 = lock_after2.GetState();
630 EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
631 EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));
632
633 // Test done.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700634#else
635 LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
636 // Force-print to std::cout so it's also outside the logcat.
637 std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
638#endif
639}
640
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700641
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700642class RandGen {
643 public:
644 explicit RandGen(uint32_t seed) : val_(seed) {}
645
646 uint32_t next() {
647 val_ = val_ * 48271 % 2147483647 + 13;
648 return val_;
649 }
650
651 uint32_t val_;
652};
653
654
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700655// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
656static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200657#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
658 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700659 static constexpr size_t kThinLockLoops = 100;
660
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700661 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700662
663 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
664 const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700665 // Create an object
666 ScopedObjectAccess soa(self);
667 // garbage is created during ClassLinker::Init
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700668 static constexpr size_t kNumberOfLocks = 10; // Number of objects = lock
669 StackHandleScope<kNumberOfLocks + 1> hs(self);
670 Handle<mirror::String> obj(
671 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700672 LockWord lock = obj->GetLockWord(false);
673 LockWord::LockState old_state = lock.GetState();
674 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
675
Andreas Gampe29b38412014-08-13 00:15:43 -0700676 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700677 // This should be an illegal monitor state.
678 EXPECT_TRUE(self->IsExceptionPending());
679 self->ClearException();
680
681 LockWord lock_after = obj->GetLockWord(false);
682 LockWord::LockState new_state = lock_after.GetState();
683 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);
Andreas Gampe525cde22014-04-22 15:44:50 -0700684
Andreas Gampe29b38412014-08-13 00:15:43 -0700685 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700686
687 LockWord lock_after2 = obj->GetLockWord(false);
688 LockWord::LockState new_state2 = lock_after2.GetState();
689 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);
690
Andreas Gampe29b38412014-08-13 00:15:43 -0700691 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700692
693 LockWord lock_after3 = obj->GetLockWord(false);
694 LockWord::LockState new_state3 = lock_after3.GetState();
695 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);
696
697 // Stress test:
698 // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
699 // each step.
700
701 RandGen r(0x1234);
702
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700703 constexpr size_t kIterations = 10000; // Number of iterations
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700704 constexpr size_t kMoveToFat = 1000; // Chance of 1:kMoveFat to make a lock fat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700705
706 size_t counts[kNumberOfLocks];
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700707 bool fat[kNumberOfLocks]; // Whether a lock should be thin or fat.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700708 Handle<mirror::String> objects[kNumberOfLocks];
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700709
710 // Initialize = allocate.
711 for (size_t i = 0; i < kNumberOfLocks; ++i) {
712 counts[i] = 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700713 fat[i] = false;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700714 objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700715 }
716
717 for (size_t i = 0; i < kIterations; ++i) {
718 // Select which lock to update.
719 size_t index = r.next() % kNumberOfLocks;
720
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700721 // Make lock fat?
722 if (!fat[index] && (r.next() % kMoveToFat == 0)) {
723 fat[index] = true;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700724 objects[index]->IdentityHashCode();
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700725
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700726 LockWord lock_iter = objects[index]->GetLockWord(false);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700727 LockWord::LockState iter_state = lock_iter.GetState();
728 if (counts[index] == 0) {
729 EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
730 } else {
731 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
732 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700733 } else {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800734 bool take_lock; // Whether to lock or unlock in this step.
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700735 if (counts[index] == 0) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800736 take_lock = true;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700737 } else if (counts[index] == kThinLockLoops) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800738 take_lock = false;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700739 } else {
740 // Randomly.
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800741 take_lock = r.next() % 2 == 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700742 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700743
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800744 if (take_lock) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700745 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
746 self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700747 counts[index]++;
748 } else {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700749 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700750 art_quick_unlock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700751 counts[index]--;
752 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700753
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700754 EXPECT_FALSE(self->IsExceptionPending());
755
756 // Check the new state.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700757 LockWord lock_iter = objects[index]->GetLockWord(true);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700758 LockWord::LockState iter_state = lock_iter.GetState();
759 if (fat[index]) {
760 // Abuse MonitorInfo.
761 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700762 MonitorInfo info(objects[index].Get());
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700763 EXPECT_EQ(counts[index], info.entry_count_) << index;
764 } else {
765 if (counts[index] > 0) {
766 EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
767 EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
768 } else {
769 EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
770 }
771 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700772 }
773 }
774
775 // Unlock the remaining count times and then check it's unlocked. Then deallocate.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700776 // Go reverse order to correctly handle Handles.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700777 for (size_t i = 0; i < kNumberOfLocks; ++i) {
778 size_t index = kNumberOfLocks - 1 - i;
779 size_t count = counts[index];
780 while (count > 0) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700781 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
782 self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700783 count--;
784 }
785
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700786 LockWord lock_after4 = objects[index]->GetLockWord(false);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700787 LockWord::LockState new_state4 = lock_after4.GetState();
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700788 EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
789 || LockWord::LockState::kFatLocked == new_state4);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700790 }
791
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700792 // Test done.
Andreas Gampe525cde22014-04-22 15:44:50 -0700793#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -0800794 UNUSED(test);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700795 LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
Andreas Gampe525cde22014-04-22 15:44:50 -0700796 // Force-print to std::cout so it's also outside the logcat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700797 std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe525cde22014-04-22 15:44:50 -0700798#endif
799}
800
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700801TEST_F(StubTest, UnlockObject) {
Andreas Gampe369810a2015-01-14 19:53:31 -0800802 // This will lead to monitor error messages in the log.
803 ScopedLogSeverity sls(LogSeverity::FATAL);
804
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700805 TestUnlockObject(this);
806}
Andreas Gampe525cde22014-04-22 15:44:50 -0700807
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
// Assembly entrypoint; declared for the architectures that provide quick stubs.
// NOTE(review): the CheckCast test below shadows this name with a local uintptr_t
// obtained via GetEntrypoint, so this declaration appears to be vestigial — confirm.
extern "C" void art_quick_check_cast(void);
#endif
812
// Exercises the quick check-cast entrypoint with array classes: casting between
// Object[] and String[] must only throw in the narrowing direction.
TEST_F(StubTest, CheckCast) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  // Fetch the stub's address from the thread's entrypoint table.
  const uintptr_t art_quick_check_cast = StubTest::GetEntrypoint(self, kQuickCheckCast);

  // Find some classes.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
  Handle<mirror::Class> c2(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  EXPECT_FALSE(self->IsExceptionPending());

  // Identity cast Object[] -> Object[]: trivially succeeds.
  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  // Identity cast String[] -> String[]: succeeds.
  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  // Widening direction (String[] is assignable to Object[]): succeeds.
  // NOTE(review): argument order appears to be (target class, checked class) — the only
  // pair that throws below is the one where the second class cannot be cast to the
  // first; confirm against the stub's declared signature.
  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  // TODO: Make the following work. But that would require correct managed frames.

  // Narrowing direction (Object[] is not a String[]): must raise an exception.
  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

#else
  LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
861
862
Andreas Gampe525cde22014-04-22 15:44:50 -0700863TEST_F(StubTest, APutObj) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200864#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
865 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700866 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700867
868 // Do not check non-checked ones, we'd need handlers and stuff...
869 const uintptr_t art_quick_aput_obj_with_null_and_bound_check =
870 StubTest::GetEntrypoint(self, kQuickAputObjectWithNullAndBoundCheck);
871
Andreas Gampe525cde22014-04-22 15:44:50 -0700872 // Create an object
873 ScopedObjectAccess soa(self);
874 // garbage is created during ClassLinker::Init
875
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700876 StackHandleScope<5> hs(soa.Self());
877 Handle<mirror::Class> c(
878 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
879 Handle<mirror::Class> ca(
880 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700881
882 // Build a string array of size 1
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700883 Handle<mirror::ObjectArray<mirror::Object>> array(
884 hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));
Andreas Gampe525cde22014-04-22 15:44:50 -0700885
886 // Build a string -> should be assignable
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700887 Handle<mirror::String> str_obj(
888 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700889
890 // Build a generic object -> should fail assigning
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700891 Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));
Andreas Gampe525cde22014-04-22 15:44:50 -0700892
893 // Play with it...
894
895 // 1) Success cases
Andreas Gampef4e910b2014-04-29 16:55:52 -0700896 // 1.1) Assign str_obj to array[0..3]
Andreas Gampe525cde22014-04-22 15:44:50 -0700897
898 EXPECT_FALSE(self->IsExceptionPending());
899
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700900 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700901 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700902
903 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700904 EXPECT_EQ(str_obj.Get(), array->Get(0));
Andreas Gampe525cde22014-04-22 15:44:50 -0700905
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700906 Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700907 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700908
909 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700910 EXPECT_EQ(str_obj.Get(), array->Get(1));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700911
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700912 Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700913 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700914
915 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700916 EXPECT_EQ(str_obj.Get(), array->Get(2));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700917
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700918 Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700919 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700920
921 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700922 EXPECT_EQ(str_obj.Get(), array->Get(3));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700923
924 // 1.2) Assign null to array[0..3]
Andreas Gampe525cde22014-04-22 15:44:50 -0700925
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700926 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700927 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700928
929 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampef4e910b2014-04-29 16:55:52 -0700930 EXPECT_EQ(nullptr, array->Get(0));
931
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700932 Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700933 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700934
935 EXPECT_FALSE(self->IsExceptionPending());
936 EXPECT_EQ(nullptr, array->Get(1));
937
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700938 Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700939 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700940
941 EXPECT_FALSE(self->IsExceptionPending());
942 EXPECT_EQ(nullptr, array->Get(2));
943
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700944 Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700945 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700946
947 EXPECT_FALSE(self->IsExceptionPending());
948 EXPECT_EQ(nullptr, array->Get(3));
Andreas Gampe525cde22014-04-22 15:44:50 -0700949
950 // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.
951
952 // 2) Failure cases (str into str[])
953 // 2.1) Array = null
954 // TODO: Throwing NPE needs actual DEX code
955
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700956// Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe525cde22014-04-22 15:44:50 -0700957// reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
958//
959// EXPECT_TRUE(self->IsExceptionPending());
960// self->ClearException();
961
962 // 2.2) Index < 0
963
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700964 Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
965 reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700966 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700967
968 EXPECT_TRUE(self->IsExceptionPending());
969 self->ClearException();
970
971 // 2.3) Index > 0
972
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700973 Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700974 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700975
976 EXPECT_TRUE(self->IsExceptionPending());
977 self->ClearException();
978
979 // 3) Failure cases (obj into str[])
980
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700981 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700982 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700983
984 EXPECT_TRUE(self->IsExceptionPending());
985 self->ClearException();
986
987 // Tests done.
988#else
989 LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
990 // Force-print to std::cout so it's also outside the logcat.
991 std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
992#endif
993}
994
// Exercises the quick object-allocation entrypoints (AllocObject, AllocObjectResolved,
// AllocObjectInitialized) on java.lang.Object, then forces an out-of-memory condition
// and checks the initialized-alloc stub reports failure with a pending exception.
TEST_F(StubTest, AllocObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // kQuickAllocObject resolves the type from a dex index, so it needs a referrer method.
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            // arbitrary
                            reinterpret_cast<size_t>(c->GetVirtualMethod(0, sizeof(void*))),
                            0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObject),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    // The stub returns the new object's address in the result word.
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
    // Cap the heap so the fill loop below can exhaust it in reasonable time.
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    // Heap-allocate the scope: a 1M-slot StackHandleScope would not fit on the stack.
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K
    size_t length = 128 * KB / 4;
    // Keep allocating ever-smaller arrays until even tiny ones (<= 10 words) fail,
    // retaining every success in a handle so the GC cannot reclaim it.
    while (length > 10) {
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h.Get() == nullptr) {
        self->ClearException();

        // Try a smaller length
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h.Get() != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    // With the heap exhausted, the stub must fail: null result plus pending OOME.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1119
// Exercises the quick array-allocation entrypoints on Object[]: a successful
// resolved allocation of length 10, and an over-large request that must fail.
TEST_F(StubTest, AllocObjectArray) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(self);
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

  // Needed to have a linked method.
  Handle<mirror::Class> c_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());

  // For some reason this does not work, as the type_idx is artificial and outside what the
  // resolved types of c_obj allow...

  // Deliberately disabled (see comment above); kept for reference. The double
  // parentheses silence the compiler's constant-condition warning.
  if ((false)) {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            10U,
                            // arbitrary
                            reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0, sizeof(void*))),
                            StubTest::GetEntrypoint(self, kQuickAllocArray),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    EXPECT_EQ(obj->GetLength(), 10);
  }

  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 10U,
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory.
  {
    // Request a GB-element array: allocation must fail with a pending exception
    // and a null result.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()),
                            GB,  // that should fail...
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1205
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001206
// Cross-checks the quick String.compareTo stub against the runtime's own
// String::CompareTo over every ordered pair of a fixed string set, comparing
// only the sign of the result.
TEST_F(StubTest, StringCompareTo) {
  // There is no StringCompareTo runtime entrypoint for __arm__ or __aarch64__.
#if defined(__i386__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();

  const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);

  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  const char* c[] = { "", "", "a", "aa", "ab",
      "aacaacaacaacaacaac",  // This one's under the default limit to go to __memcmp16.
      "aacaacaacaacaacaacaacaacaacaacaacaac",  // This one's over.
      "aacaacaacaacaacaacaacaacaacaacaacaaca" };  // As is this one. We need a separate one to
                                                  // defeat object-equal optimizations.
  static constexpr size_t kStringCount = arraysize(c);

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
  }

  // TODO: wide characters

  // Matrix of expectations. First component is first parameter. Note we only check against the
  // sign, not the value. As we are testing random offsets, we need to compute this and need to
  // rely on String::CompareTo being correct.
  int32_t expected[kStringCount][kStringCount];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      expected[x][y] = s[x]->CompareTo(s[y].Get());
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      // Test string_compareto x y
      size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
                              reinterpret_cast<size_t>(s[y].Get()), 0U,
                              art_quick_string_compareto, self);

      EXPECT_FALSE(self->IsExceptionPending());

      // The result is a 32b signed integer
      // NOTE(review): the union pun extracts the low 32 bits of the returned word
      // without assuming the stub sign- or zero-extended it; this relies on the
      // little-endian layout of the ISAs enabled above — confirm if new targets
      // are added.
      union {
        size_t r;
        int32_t i;
      } conv;
      conv.r = result;
      int32_t e = expected[x][y];
      // Only the sign (or zero-ness) is compared; the magnitude is unspecified.
      EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y="  << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y="  << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y="  << c[y] << " res=" <<
          conv.r;
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
      std::endl;
#endif
}
1287
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001288
Mathieu Chartierc7853442015-03-27 14:35:38 -07001289static void GetSetBooleanStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001290 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001291 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001292#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1293 (defined(__x86_64__) && !defined(__APPLE__))
Fred Shih37f05ef2014-07-16 18:38:08 -07001294 constexpr size_t num_values = 5;
1295 uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };
1296
1297 for (size_t i = 0; i < num_values; ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001298 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001299 static_cast<size_t>(values[i]),
1300 0U,
1301 StubTest::GetEntrypoint(self, kQuickSet8Static),
1302 self,
1303 referrer);
1304
Mathieu Chartierc7853442015-03-27 14:35:38 -07001305 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001306 0U, 0U,
1307 StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
1308 self,
1309 referrer);
1310 // Boolean currently stores bools as uint8_t, be more zealous about asserting correct writes/gets.
1311 EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
1312 }
1313#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001314 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001315 LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
1316 // Force-print to std::cout so it's also outside the logcat.
1317 std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1318#endif
1319}
Mathieu Chartiere401d142015-04-22 13:56:20 -07001320static void GetSetByteStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Mathieu Chartierc7853442015-03-27 14:35:38 -07001321 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001322 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001323#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1324 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001325 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001326
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001327 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001328 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001329 static_cast<size_t>(values[i]),
1330 0U,
1331 StubTest::GetEntrypoint(self, kQuickSet8Static),
1332 self,
1333 referrer);
1334
Mathieu Chartierc7853442015-03-27 14:35:38 -07001335 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001336 0U, 0U,
1337 StubTest::GetEntrypoint(self, kQuickGetByteStatic),
1338 self,
1339 referrer);
1340 EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
1341 }
1342#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001343 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001344 LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
1345 // Force-print to std::cout so it's also outside the logcat.
1346 std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1347#endif
1348}
1349
1350
Mathieu Chartierc7853442015-03-27 14:35:38 -07001351static void GetSetBooleanInstance(Handle<mirror::Object>* obj, ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001352 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001353 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001354#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1355 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001356 uint8_t values[] = { 0, true, 2, 128, 0xFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001357
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001358 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001359 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001360 reinterpret_cast<size_t>(obj->Get()),
1361 static_cast<size_t>(values[i]),
1362 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1363 self,
1364 referrer);
1365
Mathieu Chartierc7853442015-03-27 14:35:38 -07001366 uint8_t res = f->GetBoolean(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001367 EXPECT_EQ(values[i], res) << "Iteration " << i;
1368
Mathieu Chartierc7853442015-03-27 14:35:38 -07001369 f->SetBoolean<false>(obj->Get(), res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001370
Mathieu Chartierc7853442015-03-27 14:35:38 -07001371 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001372 reinterpret_cast<size_t>(obj->Get()),
1373 0U,
1374 StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
1375 self,
1376 referrer);
1377 EXPECT_EQ(res, static_cast<uint8_t>(res2));
1378 }
1379#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001380 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001381 LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
1382 // Force-print to std::cout so it's also outside the logcat.
1383 std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1384#endif
1385}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001386static void GetSetByteInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001387 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001388 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001389#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1390 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001391 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001392
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001393 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001394 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001395 reinterpret_cast<size_t>(obj->Get()),
1396 static_cast<size_t>(values[i]),
1397 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1398 self,
1399 referrer);
1400
Mathieu Chartierc7853442015-03-27 14:35:38 -07001401 int8_t res = f->GetByte(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001402 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001403 f->SetByte<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001404
Mathieu Chartierc7853442015-03-27 14:35:38 -07001405 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001406 reinterpret_cast<size_t>(obj->Get()),
1407 0U,
1408 StubTest::GetEntrypoint(self, kQuickGetByteInstance),
1409 self,
1410 referrer);
1411 EXPECT_EQ(res, static_cast<int8_t>(res2));
1412 }
1413#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001414 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001415 LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
1416 // Force-print to std::cout so it's also outside the logcat.
1417 std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1418#endif
1419}
1420
Mathieu Chartiere401d142015-04-22 13:56:20 -07001421static void GetSetCharStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001422 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001423 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001424#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1425 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001426 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001427
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001428 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001429 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001430 static_cast<size_t>(values[i]),
1431 0U,
1432 StubTest::GetEntrypoint(self, kQuickSet16Static),
1433 self,
1434 referrer);
1435
Mathieu Chartierc7853442015-03-27 14:35:38 -07001436 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001437 0U, 0U,
1438 StubTest::GetEntrypoint(self, kQuickGetCharStatic),
1439 self,
1440 referrer);
1441
1442 EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
1443 }
1444#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001445 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001446 LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
1447 // Force-print to std::cout so it's also outside the logcat.
1448 std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1449#endif
1450}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001451static void GetSetShortStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001452 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001453 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001454#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1455 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001456 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001457
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001458 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001459 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001460 static_cast<size_t>(values[i]),
1461 0U,
1462 StubTest::GetEntrypoint(self, kQuickSet16Static),
1463 self,
1464 referrer);
1465
Mathieu Chartierc7853442015-03-27 14:35:38 -07001466 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001467 0U, 0U,
1468 StubTest::GetEntrypoint(self, kQuickGetShortStatic),
1469 self,
1470 referrer);
1471
1472 EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
1473 }
1474#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001475 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001476 LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
1477 // Force-print to std::cout so it's also outside the logcat.
1478 std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1479#endif
1480}
1481
Mathieu Chartierc7853442015-03-27 14:35:38 -07001482static void GetSetCharInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001483 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001484 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001485#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1486 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001487 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001488
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001489 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001490 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001491 reinterpret_cast<size_t>(obj->Get()),
1492 static_cast<size_t>(values[i]),
1493 StubTest::GetEntrypoint(self, kQuickSet16Instance),
1494 self,
1495 referrer);
1496
Mathieu Chartierc7853442015-03-27 14:35:38 -07001497 uint16_t res = f->GetChar(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001498 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001499 f->SetChar<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001500
Mathieu Chartierc7853442015-03-27 14:35:38 -07001501 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001502 reinterpret_cast<size_t>(obj->Get()),
1503 0U,
1504 StubTest::GetEntrypoint(self, kQuickGetCharInstance),
1505 self,
1506 referrer);
1507 EXPECT_EQ(res, static_cast<uint16_t>(res2));
1508 }
1509#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001510 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001511 LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
1512 // Force-print to std::cout so it's also outside the logcat.
1513 std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1514#endif
1515}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001516static void GetSetShortInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001517 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001518 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001519#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1520 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001521 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001522
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001523 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001524 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001525 reinterpret_cast<size_t>(obj->Get()),
1526 static_cast<size_t>(values[i]),
1527 StubTest::GetEntrypoint(self, kQuickSet16Instance),
1528 self,
1529 referrer);
1530
Mathieu Chartierc7853442015-03-27 14:35:38 -07001531 int16_t res = f->GetShort(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001532 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001533 f->SetShort<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001534
Mathieu Chartierc7853442015-03-27 14:35:38 -07001535 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001536 reinterpret_cast<size_t>(obj->Get()),
1537 0U,
1538 StubTest::GetEntrypoint(self, kQuickGetShortInstance),
1539 self,
1540 referrer);
1541 EXPECT_EQ(res, static_cast<int16_t>(res2));
1542 }
1543#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001544 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001545 LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
1546 // Force-print to std::cout so it's also outside the logcat.
1547 std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1548#endif
1549}
1550
Mathieu Chartiere401d142015-04-22 13:56:20 -07001551static void GetSet32Static(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001552 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001553 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001554#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1555 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001556 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001557
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001558 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001559 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001560 static_cast<size_t>(values[i]),
1561 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001562 StubTest::GetEntrypoint(self, kQuickSet32Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001563 self,
1564 referrer);
1565
Mathieu Chartierc7853442015-03-27 14:35:38 -07001566 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001567 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001568 StubTest::GetEntrypoint(self, kQuickGet32Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001569 self,
1570 referrer);
1571
Goran Jakovljevic04568812015-04-23 15:27:23 +02001572#if defined(__mips__) && defined(__LP64__)
1573 EXPECT_EQ(static_cast<uint32_t>(res), values[i]) << "Iteration " << i;
1574#else
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001575 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Goran Jakovljevic04568812015-04-23 15:27:23 +02001576#endif
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001577 }
1578#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001579 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001580 LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
1581 // Force-print to std::cout so it's also outside the logcat.
1582 std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
1583#endif
1584}
1585
1586
Mathieu Chartierc7853442015-03-27 14:35:38 -07001587static void GetSet32Instance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001588 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001589 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001590#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1591 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001592 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001593
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001594 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001595 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001596 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001597 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001598 StubTest::GetEntrypoint(self, kQuickSet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001599 self,
1600 referrer);
1601
Mathieu Chartierc7853442015-03-27 14:35:38 -07001602 int32_t res = f->GetInt(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001603 EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;
1604
1605 res++;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001606 f->SetInt<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001607
Mathieu Chartierc7853442015-03-27 14:35:38 -07001608 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001609 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001610 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001611 StubTest::GetEntrypoint(self, kQuickGet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001612 self,
1613 referrer);
1614 EXPECT_EQ(res, static_cast<int32_t>(res2));
1615 }
1616#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001617 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001618 LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
1619 // Force-print to std::cout so it's also outside the logcat.
1620 std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1621#endif
1622}
1623
1624
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001625#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1626 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001627
1628static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001629 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001630 SHARED_REQUIRES(Locks::mutator_lock_) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001631 test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1632 reinterpret_cast<size_t>(val),
1633 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001634 StubTest::GetEntrypoint(self, kQuickSetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001635 self,
1636 referrer);
1637
1638 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1639 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001640 StubTest::GetEntrypoint(self, kQuickGetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001641 self,
1642 referrer);
1643
1644 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1645}
1646#endif
1647
Mathieu Chartiere401d142015-04-22 13:56:20 -07001648static void GetSetObjStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001649 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001650 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001651#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1652 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierc7853442015-03-27 14:35:38 -07001653 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001654
1655 // Allocate a string object for simplicity.
1656 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartierc7853442015-03-27 14:35:38 -07001657 set_and_check_static(f->GetDexFieldIndex(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001658
Mathieu Chartierc7853442015-03-27 14:35:38 -07001659 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001660#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001661 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001662 LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
1663 // Force-print to std::cout so it's also outside the logcat.
1664 std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
1665#endif
1666}
1667
1668
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001669#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1670 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierc7853442015-03-27 14:35:38 -07001671static void set_and_check_instance(ArtField* f, mirror::Object* trg,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001672 mirror::Object* val, Thread* self, ArtMethod* referrer,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001673 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001674 SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001675 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001676 reinterpret_cast<size_t>(trg),
1677 reinterpret_cast<size_t>(val),
Andreas Gampe29b38412014-08-13 00:15:43 -07001678 StubTest::GetEntrypoint(self, kQuickSetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001679 self,
1680 referrer);
1681
Mathieu Chartierc7853442015-03-27 14:35:38 -07001682 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001683 reinterpret_cast<size_t>(trg),
1684 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001685 StubTest::GetEntrypoint(self, kQuickGetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001686 self,
1687 referrer);
1688
1689 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1690
Mathieu Chartierc7853442015-03-27 14:35:38 -07001691 EXPECT_EQ(val, f->GetObj(trg));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001692}
1693#endif
1694
Mathieu Chartierc7853442015-03-27 14:35:38 -07001695static void GetSetObjInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001696 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001697 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001698#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1699 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001700 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001701
1702 // Allocate a string object for simplicity.
1703 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001704 set_and_check_instance(f, obj->Get(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001705
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001706 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001707#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001708 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001709 LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
1710 // Force-print to std::cout so it's also outside the logcat.
1711 std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
1712#endif
1713}
1714
1715
Calin Juravle872ab3f2015-10-02 07:27:51 +01001716// TODO: Complete these tests for 32b architectures
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001717
Mathieu Chartiere401d142015-04-22 13:56:20 -07001718static void GetSet64Static(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001719 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001720 SHARED_REQUIRES(Locks::mutator_lock_) {
Calin Juravle6e399ac2015-10-02 23:56:06 +01001721#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) \
1722 || defined(__aarch64__)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001723 uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001724
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001725 for (size_t i = 0; i < arraysize(values); ++i) {
Calin Juravle6e399ac2015-10-02 23:56:06 +01001726 // 64 bit FieldSet stores the set value in the second register.
1727 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Calin Juravle24cc1b32015-10-06 11:46:58 +01001728 0U,
1729 values[i],
1730 StubTest::GetEntrypoint(self, kQuickSet64Static),
1731 self,
1732 referrer);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001733
Mathieu Chartierc7853442015-03-27 14:35:38 -07001734 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001735 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001736 StubTest::GetEntrypoint(self, kQuickGet64Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001737 self,
1738 referrer);
1739
1740 EXPECT_EQ(res, values[i]) << "Iteration " << i;
1741 }
1742#else
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001743 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001744 LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
1745 // Force-print to std::cout so it's also outside the logcat.
1746 std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
1747#endif
1748}
1749
1750
Mathieu Chartierc7853442015-03-27 14:35:38 -07001751static void GetSet64Instance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001752 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001753 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001754#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) || \
1755 defined(__aarch64__)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001756 uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001757
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001758 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001759 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001760 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001761 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001762 StubTest::GetEntrypoint(self, kQuickSet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001763 self,
1764 referrer);
1765
Mathieu Chartierc7853442015-03-27 14:35:38 -07001766 int64_t res = f->GetLong(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001767 EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;
1768
1769 res++;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001770 f->SetLong<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001771
Mathieu Chartierc7853442015-03-27 14:35:38 -07001772 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001773 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001774 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001775 StubTest::GetEntrypoint(self, kQuickGet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001776 self,
1777 referrer);
1778 EXPECT_EQ(res, static_cast<int64_t>(res2));
1779 }
1780#else
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001781 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001782 LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
1783 // Force-print to std::cout so it's also outside the logcat.
1784 std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1785#endif
1786}
1787
1788static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
1789 // garbage is created during ClassLinker::Init
1790
1791 JNIEnv* env = Thread::Current()->GetJniEnv();
1792 jclass jc = env->FindClass("AllFields");
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001793 CHECK(jc != nullptr);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001794 jobject o = env->AllocObject(jc);
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001795 CHECK(o != nullptr);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001796
1797 ScopedObjectAccess soa(self);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001798 StackHandleScope<3> hs(self);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001799 Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
1800 Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001801 // Need a method as a referrer
Mathieu Chartiere401d142015-04-22 13:56:20 -07001802 ArtMethod* m = c->GetDirectMethod(0, sizeof(void*));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001803
1804 // Play with it...
1805
1806 // Static fields.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001807 for (ArtField& f : c->GetSFields()) {
1808 Primitive::Type type = f.GetTypeAsPrimitiveType();
Mathieu Chartierc7853442015-03-27 14:35:38 -07001809 if (test_type != type) {
1810 continue;
1811 }
1812 switch (type) {
1813 case Primitive::Type::kPrimBoolean:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001814 GetSetBooleanStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001815 break;
1816 case Primitive::Type::kPrimByte:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001817 GetSetByteStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001818 break;
1819 case Primitive::Type::kPrimChar:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001820 GetSetCharStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001821 break;
1822 case Primitive::Type::kPrimShort:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001823 GetSetShortStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001824 break;
1825 case Primitive::Type::kPrimInt:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001826 GetSet32Static(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001827 break;
1828 case Primitive::Type::kPrimLong:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001829 GetSet64Static(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001830 break;
1831 case Primitive::Type::kPrimNot:
1832 // Don't try array.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001833 if (f.GetTypeDescriptor()[0] != '[') {
1834 GetSetObjStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001835 }
1836 break;
1837 default:
1838 break; // Skip.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001839 }
1840 }
1841
1842 // Instance fields.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001843 for (ArtField& f : c->GetIFields()) {
1844 Primitive::Type type = f.GetTypeAsPrimitiveType();
Mathieu Chartierc7853442015-03-27 14:35:38 -07001845 if (test_type != type) {
1846 continue;
1847 }
1848 switch (type) {
1849 case Primitive::Type::kPrimBoolean:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001850 GetSetBooleanInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001851 break;
1852 case Primitive::Type::kPrimByte:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001853 GetSetByteInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001854 break;
1855 case Primitive::Type::kPrimChar:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001856 GetSetCharInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001857 break;
1858 case Primitive::Type::kPrimShort:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001859 GetSetShortInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001860 break;
1861 case Primitive::Type::kPrimInt:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001862 GetSet32Instance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001863 break;
1864 case Primitive::Type::kPrimLong:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001865 GetSet64Instance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001866 break;
1867 case Primitive::Type::kPrimNot:
1868 // Don't try array.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001869 if (f.GetTypeDescriptor()[0] != '[') {
1870 GetSetObjInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001871 }
1872 break;
1873 default:
1874 break; // Skip.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001875 }
1876 }
1877
1878 // TODO: Deallocate things.
1879}
1880
Fred Shih37f05ef2014-07-16 18:38:08 -07001881TEST_F(StubTest, Fields8) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001882 Thread* self = Thread::Current();
1883
1884 self->TransitionFromSuspendedToRunnable();
1885 LoadDex("AllFields");
1886 bool started = runtime_->Start();
1887 CHECK(started);
1888
1889 TestFields(self, this, Primitive::Type::kPrimBoolean);
1890 TestFields(self, this, Primitive::Type::kPrimByte);
1891}
1892
1893TEST_F(StubTest, Fields16) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001894 Thread* self = Thread::Current();
1895
1896 self->TransitionFromSuspendedToRunnable();
1897 LoadDex("AllFields");
1898 bool started = runtime_->Start();
1899 CHECK(started);
1900
1901 TestFields(self, this, Primitive::Type::kPrimChar);
1902 TestFields(self, this, Primitive::Type::kPrimShort);
1903}
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001904
1905TEST_F(StubTest, Fields32) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001906 Thread* self = Thread::Current();
1907
1908 self->TransitionFromSuspendedToRunnable();
1909 LoadDex("AllFields");
1910 bool started = runtime_->Start();
1911 CHECK(started);
1912
1913 TestFields(self, this, Primitive::Type::kPrimInt);
1914}
1915
1916TEST_F(StubTest, FieldsObj) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001917 Thread* self = Thread::Current();
1918
1919 self->TransitionFromSuspendedToRunnable();
1920 LoadDex("AllFields");
1921 bool started = runtime_->Start();
1922 CHECK(started);
1923
1924 TestFields(self, this, Primitive::Type::kPrimNot);
1925}
1926
1927TEST_F(StubTest, Fields64) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001928 Thread* self = Thread::Current();
1929
1930 self->TransitionFromSuspendedToRunnable();
1931 LoadDex("AllFields");
1932 bool started = runtime_->Start();
1933 CHECK(started);
1934
1935 TestFields(self, this, Primitive::Type::kPrimLong);
1936}
1937
// Disabled, b/27991555 .
// FIXME: Hacking the entry point to point to art_quick_to_interpreter_bridge is broken.
// The bridge calls through to GetCalleeSaveMethodCaller() which looks up the pre-header
// and gets a bogus OatQuickMethodHeader* pointing into our assembly code just before
// the bridge and uses that to check for inlined frames, crashing in the process.
//
// Exercises interface dispatch through the quick stubs: first the IMT conflict
// trampoline with a hand-built ImtConflictTable, then the regular interface
// invocation trampoline, using ArrayList.contains()/List.contains() as the
// conflicting pair.
TEST_F(StubTest, DISABLED_IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod = env->GetMethodID(
      arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get representation.
  ArtMethod* contains_amethod = soa.DecodeMethod(contains_jmethod);

  // Patch up ArrayList.contains. Give it a callable entry point if it has none,
  // so the trampolines below have something to branch to (see FIXME above).
  if (contains_amethod->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
  }

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod = env->GetMethodID(
      list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get mirror representation.
  ArtMethod* inf_contains = soa.DecodeMethod(inf_contains_jmethod);

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Create instances.

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object*>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(jobj)));

  // Invocation tests.

  // 1. imt_conflict

  // Contains.

  // We construct the ImtConflictTable ourselves, as we cannot go into the runtime stub
  // that will create it: the runtime stub expects to be called by compiled code.
  LinearAlloc* linear_alloc = Runtime::Current()->GetLinearAlloc();
  ArtMethod* conflict_method = Runtime::Current()->CreateImtConflictMethod(linear_alloc);
  // Start from an empty table, then placement-new a copy extended with the
  // (interface method -> implementation method) pair under test.
  ImtConflictTable* empty_conflict_table =
      Runtime::Current()->GetClassLinker()->CreateImtConflictTable(/*count*/0u, linear_alloc);
  void* data = linear_alloc->Alloc(
      self,
      ImtConflictTable::ComputeSizeWithOneMoreEntry(empty_conflict_table, sizeof(void*)));
  ImtConflictTable* new_table = new (data) ImtConflictTable(
      empty_conflict_table, inf_contains, contains_amethod, sizeof(void*));
  conflict_method->SetImtConflictTable(new_table, sizeof(void*));

  // Fresh list must not contain obj: expect JNI_FALSE through the conflict trampoline.
  size_t result =
      Invoke3WithReferrerAndHidden(reinterpret_cast<size_t>(conflict_method),
                                   reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self,
                                   contains_amethod,
                                   static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());

  // Contains.

  // After the add, the same dispatch must report JNI_TRUE.
  result =
      Invoke3WithReferrerAndHidden(reinterpret_cast<size_t>(conflict_method),
                                   reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self,
                                   contains_amethod,
                                   static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // 2. regular interface trampoline

  // The list contains obj, so this dispatch must report JNI_TRUE.
  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(obj.Get()),
                               StubTest::GetEntrypoint(self,
                                   kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // The list does not contain itself: expect JNI_FALSE.
  result = Invoke3WithReferrer(
      static_cast<size_t>(inf_contains->GetDexMethodIndex()),
      reinterpret_cast<size_t>(array_list.Get()), reinterpret_cast<size_t>(array_list.Get()),
      StubTest::GetEntrypoint(self, kQuickInvokeInterfaceTrampolineWithAccessCheck), self,
      contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
2081
Andreas Gampe6aac3552014-06-09 14:55:53 -07002082TEST_F(StubTest, StringIndexOf) {
Chris Larsencf283da2016-01-19 16:45:35 -08002083#if defined(__arm__) || defined(__aarch64__) || defined(__mips__)
Andreas Gampe6aac3552014-06-09 14:55:53 -07002084 Thread* self = Thread::Current();
2085 ScopedObjectAccess soa(self);
2086 // garbage is created during ClassLinker::Init
2087
2088 // Create some strings
2089 // Use array so we can index into it and use a matrix for expected results
2090 // Setup: The first half is standard. The second half uses a non-zero offset.
2091 // TODO: Shared backing arrays.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002092 const char* c_str[] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
2093 static constexpr size_t kStringCount = arraysize(c_str);
2094 const char c_char[] = { 'a', 'b', 'c', 'd', 'e' };
2095 static constexpr size_t kCharCount = arraysize(c_char);
Andreas Gampe6aac3552014-06-09 14:55:53 -07002096
2097 StackHandleScope<kStringCount> hs(self);
2098 Handle<mirror::String> s[kStringCount];
2099
2100 for (size_t i = 0; i < kStringCount; ++i) {
2101 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
2102 }
2103
2104 // Matrix of expectations. First component is first parameter. Note we only check against the
2105 // sign, not the value. As we are testing random offsets, we need to compute this and need to
2106 // rely on String::CompareTo being correct.
2107 static constexpr size_t kMaxLen = 9;
2108 DCHECK_LE(strlen(c_str[kStringCount-1]), kMaxLen) << "Please fix the indexof test.";
2109
2110 // Last dimension: start, offset by 1.
2111 int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
2112 for (size_t x = 0; x < kStringCount; ++x) {
2113 for (size_t y = 0; y < kCharCount; ++y) {
2114 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2115 expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
2116 }
2117 }
2118 }
2119
2120 // Play with it...
2121
2122 for (size_t x = 0; x < kStringCount; ++x) {
2123 for (size_t y = 0; y < kCharCount; ++y) {
2124 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2125 int32_t start = static_cast<int32_t>(z) - 1;
2126
2127 // Test string_compareto x y
2128 size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
Andreas Gampe29b38412014-08-13 00:15:43 -07002129 StubTest::GetEntrypoint(self, kQuickIndexOf), self);
Andreas Gampe6aac3552014-06-09 14:55:53 -07002130
2131 EXPECT_FALSE(self->IsExceptionPending());
2132
2133 // The result is a 32b signed integer
2134 union {
2135 size_t r;
2136 int32_t i;
2137 } conv;
2138 conv.r = result;
2139
2140 EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
2141 c_char[y] << " @ " << start;
2142 }
2143 }
2144 }
2145
2146 // TODO: Deallocate things.
2147
2148 // Tests done.
2149#else
2150 LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
2151 // Force-print to std::cout so it's also outside the logcat.
2152 std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe51f76352014-05-21 08:28:48 -07002153#endif
2154}
2155
Roland Levillain02b75802016-07-13 11:54:35 +01002156// TODO: Exercise the ReadBarrierMarkRegX entry points.
2157
Man Cao1aee9002015-07-14 22:31:42 -07002158TEST_F(StubTest, ReadBarrier) {
2159#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
2160 defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
2161 Thread* self = Thread::Current();
2162
2163 const uintptr_t readBarrierSlow = StubTest::GetEntrypoint(self, kQuickReadBarrierSlow);
2164
2165 // Create an object
2166 ScopedObjectAccess soa(self);
2167 // garbage is created during ClassLinker::Init
2168
2169 StackHandleScope<2> hs(soa.Self());
2170 Handle<mirror::Class> c(
2171 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
2172
2173 // Build an object instance
2174 Handle<mirror::Object> obj(hs.NewHandle(c->AllocObject(soa.Self())));
2175
2176 EXPECT_FALSE(self->IsExceptionPending());
2177
2178 size_t result = Invoke3(0U, reinterpret_cast<size_t>(obj.Get()),
2179 mirror::Object::ClassOffset().SizeValue(), readBarrierSlow, self);
2180
2181 EXPECT_FALSE(self->IsExceptionPending());
2182 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
2183 mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
2184 EXPECT_EQ(klass, obj->GetClass());
2185
2186 // Tests done.
2187#else
2188 LOG(INFO) << "Skipping read_barrier_slow";
2189 // Force-print to std::cout so it's also outside the logcat.
2190 std::cout << "Skipping read_barrier_slow" << std::endl;
2191#endif
2192}
2193
Roland Levillain0d5a2812015-11-13 10:07:31 +00002194TEST_F(StubTest, ReadBarrierForRoot) {
2195#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
2196 defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
2197 Thread* self = Thread::Current();
2198
2199 const uintptr_t readBarrierForRootSlow =
2200 StubTest::GetEntrypoint(self, kQuickReadBarrierForRootSlow);
2201
2202 // Create an object
2203 ScopedObjectAccess soa(self);
2204 // garbage is created during ClassLinker::Init
2205
2206 StackHandleScope<1> hs(soa.Self());
2207
2208 Handle<mirror::String> obj(
2209 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
2210
2211 EXPECT_FALSE(self->IsExceptionPending());
2212
2213 GcRoot<mirror::Class>& root = mirror::String::java_lang_String_;
2214 size_t result = Invoke3(reinterpret_cast<size_t>(&root), 0U, 0U, readBarrierForRootSlow, self);
2215
2216 EXPECT_FALSE(self->IsExceptionPending());
2217 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
2218 mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
2219 EXPECT_EQ(klass, obj->GetClass());
2220
2221 // Tests done.
2222#else
2223 LOG(INFO) << "Skipping read_barrier_for_root_slow";
2224 // Force-print to std::cout so it's also outside the logcat.
2225 std::cout << "Skipping read_barrier_for_root_slow" << std::endl;
2226#endif
2227}
2228
Andreas Gampe525cde22014-04-22 15:44:50 -07002229} // namespace art