blob: 80bb51d9b6a00d8e84cab4ba23625ae19569a8fa [file] [log] [blame]
Andreas Gampe525cde22014-04-22 15:44:50 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Ian Rogerse63db272014-07-15 15:36:11 -070017#include <cstdio>
18
Mathieu Chartierc7853442015-03-27 14:35:38 -070019#include "art_field-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070020#include "art_method-inl.h"
Andreas Gampe542451c2016-07-26 09:02:02 -070021#include "base/enums.h"
Vladimir Marko3481ba22015-04-13 12:22:36 +010022#include "class_linker-inl.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070023#include "common_runtime_test.h"
Andreas Gampe29b38412014-08-13 00:15:43 -070024#include "entrypoints/quick/quick_entrypoints_enum.h"
Nicolas Geoffray1004faa2016-03-23 14:28:30 +000025#include "linear_alloc.h"
Andreas Gampe51f76352014-05-21 08:28:48 -070026#include "mirror/class-inl.h"
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -070027#include "mirror/string-inl.h"
Ian Rogerse63db272014-07-15 15:36:11 -070028#include "scoped_thread_state_change.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070029
30namespace art {
31
32
// Test fixture for exercising the quick entrypoint stubs directly. Invoke3* below drop into
// hand-written per-architecture inline assembly that marshals up to three arguments (plus an
// optional referrer ArtMethod* and a "hidden" argument) into the registers the quick ABI
// expects, then calls the stub — so the stubs can be driven from gtest without any compiled
// managed code.
class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(), type);
        }
      }
    }
  }

  // Shrink the heap the runtime is started with; these tests allocate very little.
  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  // Invoke a stub with three arguments and no referrer (nullptr) / no hidden argument (0).
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             ArtMethod* referrer) {
    return Invoke3WithReferrerAndHidden(arg0, arg1, arg2, code, self, referrer, 0);
  }

  // TODO: Set up a frame according to referrer's specs.
  // Core trampoline: per-architecture inline assembly that spills the caller's registers,
  // pushes the referrer where the quick ABI expects it, loads arg0-arg2/code/self/hidden into
  // the convention registers, and calls the stub. Returns the stub's integer result. On
  // aarch64 it additionally garbles d8-d15 before the call and verifies them afterwards
  // (result stored into fpr_result / fp_result).
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
#define PUSH(reg) "push " # reg "\n\t .cfi_adjust_cfa_offset 4\n\t"
#define POP(reg) "pop " # reg "\n\t .cfi_adjust_cfa_offset -4\n\t"
    __asm__ __volatile__(
        "movd %[hidden], %%xmm7\n\t"  // This is a memory op, so do this early. If it is off of
                                      // esp, then we won't be able to access it after spilling.

        // Spill 6 registers.
        PUSH(%%ebx)
        PUSH(%%ecx)
        PUSH(%%edx)
        PUSH(%%esi)
        PUSH(%%edi)
        PUSH(%%ebp)

        // Store the inputs to the stack, but keep the referrer up top, less work.
        PUSH(%[referrer])     // Align stack.
        PUSH(%[referrer])     // Store referrer

        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[code])
        // Now read them back into the required registers.
        POP(%%edi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%eax)
        // Call is prepared now.

        "call *%%edi\n\t"           // Call the stub
        "addl $8, %%esp\n\t"        // Pop referrer and padding.
        ".cfi_adjust_cfa_offset -8\n\t"

        // Restore 6 registers.
        POP(%%ebp)
        POP(%%edi)
        POP(%%esi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%ebx)

        : "=a" (result)
          // Use the result from eax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer]"r"(referrer), [hidden]"m"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory", "xmm7");  // clobber.
#undef PUSH
#undef POP
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"   // NOTE(review): "\n\n" (not "\n\t") — harmless; confirm intentional.
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop null and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "r0", "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #80\n\t"
        ".cfi_adjust_cfa_offset 80\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"
        // To be extra defensive, store x20. We do this because some of the stubs might make a
        // transition into the runtime via the blr instruction below and *not* save x20.
        "str x20, [sp, #64]\n\t"
        // 8 byte buffer

        "sub sp, sp, #16\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"    // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x19, x17, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"              // Call the stub
        "mov x8, x0\n\t"          // Store result
        "add sp, sp, #16\n\t"     // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"          // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"    // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "ldr x20, [sp, #64]\n\t"
        "add sp, sp, #80\n\t"     // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -80\n\t"

        "str x9, %[fpr_result]\n\t"   // Store the FPR comparison result
        "mov %[result], x8\n\t"       // Store the call result

        "b 3f\n\t"                    // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                    // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from x0 (saved into x8 above).
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
          // Leave one register unclobbered, which is needed for compiling with
          // -fstack-protector-strong. According to AAPCS64 registers x9-x15 are caller-saved,
          // which means we should unclobber one of the callee-saved registers that are unused.
          // Here we use x20.
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");
#elif defined(__mips__) && !defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a3 and t0-t7 which we say we don't clobber. May contain args.
        "addiu $sp, $sp, -64\n\t"
        "sw $a0, 0($sp)\n\t"
        "sw $a1, 4($sp)\n\t"
        "sw $a2, 8($sp)\n\t"
        "sw $a3, 12($sp)\n\t"
        "sw $t0, 16($sp)\n\t"
        "sw $t1, 20($sp)\n\t"
        "sw $t2, 24($sp)\n\t"
        "sw $t3, 28($sp)\n\t"
        "sw $t4, 32($sp)\n\t"
        "sw $t5, 36($sp)\n\t"
        "sw $t6, 40($sp)\n\t"
        "sw $t7, 44($sp)\n\t"
        // Spill gp register since it is caller save.
        "sw $gp, 52($sp)\n\t"

        "addiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sw %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "addiu $sp, $sp, -24\n\t"
        "sw %[arg0], 0($sp)\n\t"
        "sw %[arg1], 4($sp)\n\t"
        "sw %[arg2], 8($sp)\n\t"
        "sw %[code], 12($sp)\n\t"
        "sw %[self], 16($sp)\n\t"
        "sw %[hidden], 20($sp)\n\t"

        // Load call params into the right registers.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $t9, 12($sp)\n\t"
        "lw $s1, 16($sp)\n\t"
        "lw $t0, 20($sp)\n\t"
        "addiu $sp, $sp, 24\n\t"

        "jalr $t9\n\t"             // Call the stub.
        "nop\n\t"                  // Branch delay slot.
        "addiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $a3, 12($sp)\n\t"
        "lw $t0, 16($sp)\n\t"
        "lw $t1, 20($sp)\n\t"
        "lw $t2, 24($sp)\n\t"
        "lw $t3, 28($sp)\n\t"
        "lw $t4, 32($sp)\n\t"
        "lw $t5, 36($sp)\n\t"
        "lw $t6, 40($sp)\n\t"
        "lw $t7, 44($sp)\n\t"
        // Restore gp.
        "lw $gp, 52($sp)\n\t"
        "addiu $sp, $sp, 64\n\t"   // Free stack space, now sp as on entry.

        "move %[result], $v0\n\t"  // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "at", "v0", "v1", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "t8", "t9", "k0", "k1",
          "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__mips__) && defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a7 which we say we don't clobber. May contain args.
        "daddiu $sp, $sp, -64\n\t"
        "sd $a0, 0($sp)\n\t"
        "sd $a1, 8($sp)\n\t"
        "sd $a2, 16($sp)\n\t"
        "sd $a3, 24($sp)\n\t"
        "sd $a4, 32($sp)\n\t"
        "sd $a5, 40($sp)\n\t"
        "sd $a6, 48($sp)\n\t"
        "sd $a7, 56($sp)\n\t"

        "daddiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sd %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "daddiu $sp, $sp, -48\n\t"
        "sd %[arg0], 0($sp)\n\t"
        "sd %[arg1], 8($sp)\n\t"
        "sd %[arg2], 16($sp)\n\t"
        "sd %[code], 24($sp)\n\t"
        "sd %[self], 32($sp)\n\t"
        "sd %[hidden], 40($sp)\n\t"

        // Load call params into the right registers.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $t9, 24($sp)\n\t"
        "ld $s1, 32($sp)\n\t"
        "ld $t0, 40($sp)\n\t"
        "daddiu $sp, $sp, 48\n\t"

        "jalr $t9\n\t"              // Call the stub.
        "nop\n\t"                   // Branch delay slot.
        "daddiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $a3, 24($sp)\n\t"
        "ld $a4, 32($sp)\n\t"
        "ld $a5, 40($sp)\n\t"
        "ld $a6, 48($sp)\n\t"
        "ld $a7, 56($sp)\n\t"
        "daddiu $sp, $sp, 64\n\t"

        "move %[result], $v0\n\t"   // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        // Instead aliases t0-t3, register names $12-$15 has been used in the clobber list because
        // t0-t3 are ambiguous.
        : "at", "v0", "v1", "$12", "$13", "$14", "$15", "s0", "s1", "s2", "s3", "s4", "s5", "s6",
          "s7", "t8", "t9", "k0", "k1", "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__)
#define PUSH(reg) "pushq " # reg "\n\t .cfi_adjust_cfa_offset 8\n\t"
#define POP(reg) "popq " # reg "\n\t .cfi_adjust_cfa_offset -8\n\t"
    // Note: Uses the native convention. We do a callee-save regimen by manually spilling and
    // restoring almost all registers.
    // TODO: Set the thread?
    __asm__ __volatile__(
        // Spill almost everything (except rax, rsp). 14 registers.
        PUSH(%%rbx)
        PUSH(%%rcx)
        PUSH(%%rdx)
        PUSH(%%rsi)
        PUSH(%%rdi)
        PUSH(%%rbp)
        PUSH(%%r8)
        PUSH(%%r9)
        PUSH(%%r10)
        PUSH(%%r11)
        PUSH(%%r12)
        PUSH(%%r13)
        PUSH(%%r14)
        PUSH(%%r15)

        PUSH(%[referrer])           // Push referrer & 16B alignment padding
        PUSH(%[referrer])

        // Now juggle the input registers.
        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[hidden])
        PUSH(%[code])
        POP(%%r8)
        POP(%%rax)
        POP(%%rdx)
        POP(%%rsi)
        POP(%%rdi)

        "call *%%r8\n\t"            // Call the stub
        "addq $16, %%rsp\n\t"       // Pop null and padding
        ".cfi_adjust_cfa_offset -16\n\t"

        POP(%%r15)
        POP(%%r14)
        POP(%%r13)
        POP(%%r12)
        POP(%%r11)
        POP(%%r10)
        POP(%%r9)
        POP(%%r8)
        POP(%%rbp)
        POP(%%rdi)
        POP(%%rsi)
        POP(%%rdx)
        POP(%%rcx)
        POP(%%rbx)

        : "=a" (result)
          // Use the result from rax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into some other
          // register. We can't use "b" (rbx), as ASAN uses this for the frame pointer.
        : "memory");  // We spill and restore (almost) all registers, so only mention memory here.
#undef PUSH
#undef POP
#else
    UNUSED(arg0, arg1, arg2, code, referrer, hidden);
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Read the raw entrypoint pointer for `entrypoint` out of the thread's entrypoint table
  // (thread-local, addressed by offset from the Thread*).
  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
    int32_t offset;
    offset = GetThreadOffset<kRuntimePointerSize>(entrypoint).Int32Value();
    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
  }

 protected:
  // Result of the callee-saved FPR verification done by the aarch64 path of
  // Invoke3WithReferrerAndHidden (0 == d8-d15 survived the call; always 0 on other arches).
  size_t fp_result;
};
540
541
Andreas Gampe525cde22014-04-22 15:44:50 -0700542TEST_F(StubTest, Memcpy) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200543#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__)) || defined(__mips__)
Andreas Gampe525cde22014-04-22 15:44:50 -0700544 Thread* self = Thread::Current();
545
546 uint32_t orig[20];
547 uint32_t trg[20];
548 for (size_t i = 0; i < 20; ++i) {
549 orig[i] = i;
550 trg[i] = 0;
551 }
552
553 Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
Andreas Gampe29b38412014-08-13 00:15:43 -0700554 10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700555
556 EXPECT_EQ(orig[0], trg[0]);
557
558 for (size_t i = 1; i < 4; ++i) {
559 EXPECT_NE(orig[i], trg[i]);
560 }
561
562 for (size_t i = 4; i < 14; ++i) {
563 EXPECT_EQ(orig[i], trg[i]);
564 }
565
566 for (size_t i = 14; i < 20; ++i) {
567 EXPECT_NE(orig[i], trg[i]);
568 }
569
570 // TODO: Test overlapping?
571
572#else
573 LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
574 // Force-print to std::cout so it's also outside the logcat.
575 std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
576#endif
577}
578
Andreas Gampe525cde22014-04-22 15:44:50 -0700579TEST_F(StubTest, LockObject) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200580#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
581 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700582 static constexpr size_t kThinLockLoops = 100;
583
Andreas Gampe525cde22014-04-22 15:44:50 -0700584 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700585
586 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
587
Andreas Gampe525cde22014-04-22 15:44:50 -0700588 // Create an object
589 ScopedObjectAccess soa(self);
590 // garbage is created during ClassLinker::Init
591
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700592 StackHandleScope<2> hs(soa.Self());
593 Handle<mirror::String> obj(
594 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700595 LockWord lock = obj->GetLockWord(false);
596 LockWord::LockState old_state = lock.GetState();
597 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
598
Andreas Gampe29b38412014-08-13 00:15:43 -0700599 Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700600
601 LockWord lock_after = obj->GetLockWord(false);
602 LockWord::LockState new_state = lock_after.GetState();
603 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700604 EXPECT_EQ(lock_after.ThinLockCount(), 0U); // Thin lock starts count at zero
605
606 for (size_t i = 1; i < kThinLockLoops; ++i) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700607 Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700608
609 // Check we're at lock count i
610
611 LockWord l_inc = obj->GetLockWord(false);
612 LockWord::LockState l_inc_state = l_inc.GetState();
613 EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
614 EXPECT_EQ(l_inc.ThinLockCount(), i);
615 }
616
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700617 // Force a fat lock by running identity hashcode to fill up lock word.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700618 Handle<mirror::String> obj2(hs.NewHandle(
619 mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700620
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700621 obj2->IdentityHashCode();
622
Andreas Gampe29b38412014-08-13 00:15:43 -0700623 Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700624
625 LockWord lock_after2 = obj2->GetLockWord(false);
626 LockWord::LockState new_state2 = lock_after2.GetState();
627 EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
628 EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));
629
630 // Test done.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700631#else
632 LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
633 // Force-print to std::cout so it's also outside the logcat.
634 std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
635#endif
636}
637
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700638
// Tiny deterministic pseudo-random generator (Lehmer-style 48271 multiplier with an additive
// tweak, state wrapping in uint32_t). Only needs to be reproducible, not statistically strong.
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  // Advance the state and return the new value.
  uint32_t next() {
    uint32_t stepped = val_ * 48271u;  // Wraps modulo 2^32, as in the classic形 minstd variant.
    stepped %= 2147483647u;
    val_ = stepped + 13u;
    return val_;
  }

  uint32_t val_;  // Current state; public so callers could inspect or reseed.
};
650
651
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700652// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
653static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200654#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
655 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700656 static constexpr size_t kThinLockLoops = 100;
657
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700658 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700659
660 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
661 const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700662 // Create an object
663 ScopedObjectAccess soa(self);
664 // garbage is created during ClassLinker::Init
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700665 static constexpr size_t kNumberOfLocks = 10; // Number of objects = lock
666 StackHandleScope<kNumberOfLocks + 1> hs(self);
667 Handle<mirror::String> obj(
668 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700669 LockWord lock = obj->GetLockWord(false);
670 LockWord::LockState old_state = lock.GetState();
671 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
672
Andreas Gampe29b38412014-08-13 00:15:43 -0700673 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700674 // This should be an illegal monitor state.
675 EXPECT_TRUE(self->IsExceptionPending());
676 self->ClearException();
677
678 LockWord lock_after = obj->GetLockWord(false);
679 LockWord::LockState new_state = lock_after.GetState();
680 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);
Andreas Gampe525cde22014-04-22 15:44:50 -0700681
Andreas Gampe29b38412014-08-13 00:15:43 -0700682 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700683
684 LockWord lock_after2 = obj->GetLockWord(false);
685 LockWord::LockState new_state2 = lock_after2.GetState();
686 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);
687
Andreas Gampe29b38412014-08-13 00:15:43 -0700688 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700689
690 LockWord lock_after3 = obj->GetLockWord(false);
691 LockWord::LockState new_state3 = lock_after3.GetState();
692 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);
693
694 // Stress test:
695 // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
696 // each step.
697
698 RandGen r(0x1234);
699
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700700 constexpr size_t kIterations = 10000; // Number of iterations
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700701 constexpr size_t kMoveToFat = 1000; // Chance of 1:kMoveFat to make a lock fat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700702
703 size_t counts[kNumberOfLocks];
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700704 bool fat[kNumberOfLocks]; // Whether a lock should be thin or fat.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700705 Handle<mirror::String> objects[kNumberOfLocks];
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700706
707 // Initialize = allocate.
708 for (size_t i = 0; i < kNumberOfLocks; ++i) {
709 counts[i] = 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700710 fat[i] = false;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700711 objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700712 }
713
714 for (size_t i = 0; i < kIterations; ++i) {
715 // Select which lock to update.
716 size_t index = r.next() % kNumberOfLocks;
717
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700718 // Make lock fat?
719 if (!fat[index] && (r.next() % kMoveToFat == 0)) {
720 fat[index] = true;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700721 objects[index]->IdentityHashCode();
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700722
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700723 LockWord lock_iter = objects[index]->GetLockWord(false);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700724 LockWord::LockState iter_state = lock_iter.GetState();
725 if (counts[index] == 0) {
726 EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
727 } else {
728 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
729 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700730 } else {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800731 bool take_lock; // Whether to lock or unlock in this step.
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700732 if (counts[index] == 0) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800733 take_lock = true;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700734 } else if (counts[index] == kThinLockLoops) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800735 take_lock = false;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700736 } else {
737 // Randomly.
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800738 take_lock = r.next() % 2 == 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700739 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700740
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800741 if (take_lock) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700742 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
743 self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700744 counts[index]++;
745 } else {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700746 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700747 art_quick_unlock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700748 counts[index]--;
749 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700750
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700751 EXPECT_FALSE(self->IsExceptionPending());
752
753 // Check the new state.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700754 LockWord lock_iter = objects[index]->GetLockWord(true);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700755 LockWord::LockState iter_state = lock_iter.GetState();
756 if (fat[index]) {
757 // Abuse MonitorInfo.
758 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700759 MonitorInfo info(objects[index].Get());
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700760 EXPECT_EQ(counts[index], info.entry_count_) << index;
761 } else {
762 if (counts[index] > 0) {
763 EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
764 EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
765 } else {
766 EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
767 }
768 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700769 }
770 }
771
772 // Unlock the remaining count times and then check it's unlocked. Then deallocate.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700773 // Go reverse order to correctly handle Handles.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700774 for (size_t i = 0; i < kNumberOfLocks; ++i) {
775 size_t index = kNumberOfLocks - 1 - i;
776 size_t count = counts[index];
777 while (count > 0) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700778 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
779 self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700780 count--;
781 }
782
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700783 LockWord lock_after4 = objects[index]->GetLockWord(false);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700784 LockWord::LockState new_state4 = lock_after4.GetState();
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700785 EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
786 || LockWord::LockState::kFatLocked == new_state4);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700787 }
788
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700789 // Test done.
Andreas Gampe525cde22014-04-22 15:44:50 -0700790#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -0800791 UNUSED(test);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700792 LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
Andreas Gampe525cde22014-04-22 15:44:50 -0700793 // Force-print to std::cout so it's also outside the logcat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700794 std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe525cde22014-04-22 15:44:50 -0700795#endif
796}
797
TEST_F(StubTest, UnlockObject) {
  // Drives the randomized lock/unlock stub stress test (TestUnlockObject) from the test fixture.
  // This will lead to monitor error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  TestUnlockObject(this);
}
Andreas Gampe525cde22014-04-22 15:44:50 -0700804
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
// NOTE(review): the CheckCast test below obtains the entrypoint via GetEntrypoint and shadows
// this name with a local variable, so this extern declaration appears unused — confirm before
// removing.
extern "C" void art_quick_check_cast(void);
#endif
809
810TEST_F(StubTest, CheckCast) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200811#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
812 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700813 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700814
815 const uintptr_t art_quick_check_cast = StubTest::GetEntrypoint(self, kQuickCheckCast);
816
Andreas Gampe525cde22014-04-22 15:44:50 -0700817 // Find some classes.
818 ScopedObjectAccess soa(self);
819 // garbage is created during ClassLinker::Init
820
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700821 StackHandleScope<2> hs(soa.Self());
822 Handle<mirror::Class> c(
823 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
824 Handle<mirror::Class> c2(
825 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700826
827 EXPECT_FALSE(self->IsExceptionPending());
828
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700829 Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700830 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700831
832 EXPECT_FALSE(self->IsExceptionPending());
833
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700834 Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700835 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700836
837 EXPECT_FALSE(self->IsExceptionPending());
838
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700839 Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700840 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700841
842 EXPECT_FALSE(self->IsExceptionPending());
843
844 // TODO: Make the following work. But that would require correct managed frames.
845
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700846 Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700847 art_quick_check_cast, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700848
849 EXPECT_TRUE(self->IsExceptionPending());
850 self->ClearException();
851
852#else
853 LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
854 // Force-print to std::cout so it's also outside the logcat.
855 std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
856#endif
857}
858
859
Andreas Gampe525cde22014-04-22 15:44:50 -0700860TEST_F(StubTest, APutObj) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200861#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
862 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700863 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700864
865 // Do not check non-checked ones, we'd need handlers and stuff...
866 const uintptr_t art_quick_aput_obj_with_null_and_bound_check =
867 StubTest::GetEntrypoint(self, kQuickAputObjectWithNullAndBoundCheck);
868
Andreas Gampe525cde22014-04-22 15:44:50 -0700869 // Create an object
870 ScopedObjectAccess soa(self);
871 // garbage is created during ClassLinker::Init
872
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700873 StackHandleScope<5> hs(soa.Self());
874 Handle<mirror::Class> c(
875 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
876 Handle<mirror::Class> ca(
877 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700878
879 // Build a string array of size 1
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700880 Handle<mirror::ObjectArray<mirror::Object>> array(
881 hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));
Andreas Gampe525cde22014-04-22 15:44:50 -0700882
883 // Build a string -> should be assignable
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700884 Handle<mirror::String> str_obj(
885 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700886
887 // Build a generic object -> should fail assigning
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700888 Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));
Andreas Gampe525cde22014-04-22 15:44:50 -0700889
890 // Play with it...
891
892 // 1) Success cases
Andreas Gampef4e910b2014-04-29 16:55:52 -0700893 // 1.1) Assign str_obj to array[0..3]
Andreas Gampe525cde22014-04-22 15:44:50 -0700894
895 EXPECT_FALSE(self->IsExceptionPending());
896
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700897 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700898 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700899
900 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700901 EXPECT_EQ(str_obj.Get(), array->Get(0));
Andreas Gampe525cde22014-04-22 15:44:50 -0700902
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700903 Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700904 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700905
906 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700907 EXPECT_EQ(str_obj.Get(), array->Get(1));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700908
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700909 Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700910 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700911
912 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700913 EXPECT_EQ(str_obj.Get(), array->Get(2));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700914
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700915 Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700916 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700917
918 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700919 EXPECT_EQ(str_obj.Get(), array->Get(3));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700920
921 // 1.2) Assign null to array[0..3]
Andreas Gampe525cde22014-04-22 15:44:50 -0700922
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700923 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700924 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700925
926 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampef4e910b2014-04-29 16:55:52 -0700927 EXPECT_EQ(nullptr, array->Get(0));
928
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700929 Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700930 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700931
932 EXPECT_FALSE(self->IsExceptionPending());
933 EXPECT_EQ(nullptr, array->Get(1));
934
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700935 Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700936 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700937
938 EXPECT_FALSE(self->IsExceptionPending());
939 EXPECT_EQ(nullptr, array->Get(2));
940
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700941 Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700942 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700943
944 EXPECT_FALSE(self->IsExceptionPending());
945 EXPECT_EQ(nullptr, array->Get(3));
Andreas Gampe525cde22014-04-22 15:44:50 -0700946
947 // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.
948
949 // 2) Failure cases (str into str[])
950 // 2.1) Array = null
951 // TODO: Throwing NPE needs actual DEX code
952
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700953// Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe525cde22014-04-22 15:44:50 -0700954// reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
955//
956// EXPECT_TRUE(self->IsExceptionPending());
957// self->ClearException();
958
959 // 2.2) Index < 0
960
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700961 Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
962 reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700963 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700964
965 EXPECT_TRUE(self->IsExceptionPending());
966 self->ClearException();
967
968 // 2.3) Index > 0
969
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700970 Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700971 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700972
973 EXPECT_TRUE(self->IsExceptionPending());
974 self->ClearException();
975
976 // 3) Failure cases (obj into str[])
977
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700978 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700979 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700980
981 EXPECT_TRUE(self->IsExceptionPending());
982 self->ClearException();
983
984 // Tests done.
985#else
986 LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
987 // Force-print to std::cout so it's also outside the logcat.
988 std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
989#endif
990}
991
TEST_F(StubTest, AllocObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Exercises the quick object-allocation entrypoints (kQuickAllocObject,
  // kQuickAllocObjectResolved, kQuickAllocObjectInitialized) and one OOM failure path.
  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            // arbitrary
                            reinterpret_cast<size_t>(c->GetVirtualMethod(0, kRuntimePointerSize)),
                            0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObject),
                            self);

    // A successful allocation returns a non-null object of the requested class.
    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
    // Cap the heap, then fill it so the stub allocation below must fail.
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K
    size_t length = 128 * KB / 4;
    while (length > 10) {
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h.Get() == nullptr) {
        // Allocation failed: retry with progressively smaller arrays until even small ones fail.
        self->ClearException();

        // Try a smaller length
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h.Get() != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    // With the heap exhausted, the stub must report failure (pending exception, null result).
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1116
TEST_F(StubTest, AllocObjectArray) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Exercises the quick array-allocation entrypoint (kQuickAllocArrayResolved): one success
  // case and one oversized (OOM) failure case.
  // TODO: Check the "Unresolved" allocation stubs

  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(self);
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

  // Needed to have a linked method.
  Handle<mirror::Class> c_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());

  // For some reason this does not work, as the type_idx is artificial and outside what the
  // resolved types of c_obj allow...

  // Deliberately disabled (see comment above); kept for reference.
  if ((false)) {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(
        static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
        10U,
        // arbitrary
        reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0, kRuntimePointerSize)),
        StubTest::GetEntrypoint(self, kQuickAllocArray),
        self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    EXPECT_EQ(obj->GetLength(), 10);
  }

  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 10U,
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    // The result must be a 10-element object array of the requested class.
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory.
  {
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()),
                            GB,  // that should fail...
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1203
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001204
TEST_F(StubTest, StringCompareTo) {
  // Cross-checks the quick string-compareto stub against String::CompareTo for every pair of
  // test strings, comparing only the sign of the result.
  // There is no StringCompareTo runtime entrypoint for __arm__ or __aarch64__.
#if defined(__i386__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();

  const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);

  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  const char* c[] = { "", "", "a", "aa", "ab",
      "aacaacaacaacaacaac",  // This one's under the default limit to go to __memcmp16.
      "aacaacaacaacaacaacaacaacaacaacaacaac",  // This one's over.
      "aacaacaacaacaacaacaacaacaacaacaacaaca" };  // As is this one. We need a separate one to
                                                  // defeat object-equal optimizations.
  static constexpr size_t kStringCount = arraysize(c);

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
  }

  // TODO: wide characters

  // Matrix of expectations. First component is first parameter. Note we only check against the
  // sign, not the value. As we are testing random offsets, we need to compute this and need to
  // rely on String::CompareTo being correct.
  int32_t expected[kStringCount][kStringCount];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      expected[x][y] = s[x]->CompareTo(s[y].Get());
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      // Test string_compareto x y
      size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
                              reinterpret_cast<size_t>(s[y].Get()), 0U,
                              art_quick_string_compareto, self);

      EXPECT_FALSE(self->IsExceptionPending());

      // The result is a 32b signed integer
      // NOTE(review): union type punning is technically UB in standard C++ (GCC/Clang define
      // it); a static_cast of the low 32 bits would be the portable spelling — confirm intent.
      union {
        size_t r;
        int32_t i;
      } conv;
      conv.r = result;
      int32_t e = expected[x][y];
      // Only the sign of the stub's result is compared with the expected sign.
      EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
      std::endl;
#endif
}
1285
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001286
Mathieu Chartierc7853442015-03-27 14:35:38 -07001287static void GetSetBooleanStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001288 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001289 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001290#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1291 (defined(__x86_64__) && !defined(__APPLE__))
Fred Shih37f05ef2014-07-16 18:38:08 -07001292 constexpr size_t num_values = 5;
1293 uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };
1294
1295 for (size_t i = 0; i < num_values; ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001296 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001297 static_cast<size_t>(values[i]),
1298 0U,
1299 StubTest::GetEntrypoint(self, kQuickSet8Static),
1300 self,
1301 referrer);
1302
Mathieu Chartierc7853442015-03-27 14:35:38 -07001303 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001304 0U, 0U,
1305 StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
1306 self,
1307 referrer);
1308 // Boolean currently stores bools as uint8_t, be more zealous about asserting correct writes/gets.
1309 EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
1310 }
1311#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001312 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001313 LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
1314 // Force-print to std::cout so it's also outside the logcat.
1315 std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1316#endif
1317}
Mathieu Chartiere401d142015-04-22 13:56:20 -07001318static void GetSetByteStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Mathieu Chartierc7853442015-03-27 14:35:38 -07001319 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001320 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001321#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1322 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001323 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001324
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001325 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001326 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001327 static_cast<size_t>(values[i]),
1328 0U,
1329 StubTest::GetEntrypoint(self, kQuickSet8Static),
1330 self,
1331 referrer);
1332
Mathieu Chartierc7853442015-03-27 14:35:38 -07001333 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001334 0U, 0U,
1335 StubTest::GetEntrypoint(self, kQuickGetByteStatic),
1336 self,
1337 referrer);
1338 EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
1339 }
1340#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001341 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001342 LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
1343 // Force-print to std::cout so it's also outside the logcat.
1344 std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1345#endif
1346}
1347
1348
Mathieu Chartierc7853442015-03-27 14:35:38 -07001349static void GetSetBooleanInstance(Handle<mirror::Object>* obj, ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001350 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001351 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001352#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1353 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001354 uint8_t values[] = { 0, true, 2, 128, 0xFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001355
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001356 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001357 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001358 reinterpret_cast<size_t>(obj->Get()),
1359 static_cast<size_t>(values[i]),
1360 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1361 self,
1362 referrer);
1363
Mathieu Chartierc7853442015-03-27 14:35:38 -07001364 uint8_t res = f->GetBoolean(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001365 EXPECT_EQ(values[i], res) << "Iteration " << i;
1366
Mathieu Chartierc7853442015-03-27 14:35:38 -07001367 f->SetBoolean<false>(obj->Get(), res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001368
Mathieu Chartierc7853442015-03-27 14:35:38 -07001369 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001370 reinterpret_cast<size_t>(obj->Get()),
1371 0U,
1372 StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
1373 self,
1374 referrer);
1375 EXPECT_EQ(res, static_cast<uint8_t>(res2));
1376 }
1377#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001378 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001379 LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
1380 // Force-print to std::cout so it's also outside the logcat.
1381 std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1382#endif
1383}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001384static void GetSetByteInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001385 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001386 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001387#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1388 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001389 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001390
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001391 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001392 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001393 reinterpret_cast<size_t>(obj->Get()),
1394 static_cast<size_t>(values[i]),
1395 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1396 self,
1397 referrer);
1398
Mathieu Chartierc7853442015-03-27 14:35:38 -07001399 int8_t res = f->GetByte(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001400 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001401 f->SetByte<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001402
Mathieu Chartierc7853442015-03-27 14:35:38 -07001403 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001404 reinterpret_cast<size_t>(obj->Get()),
1405 0U,
1406 StubTest::GetEntrypoint(self, kQuickGetByteInstance),
1407 self,
1408 referrer);
1409 EXPECT_EQ(res, static_cast<int8_t>(res2));
1410 }
1411#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001412 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001413 LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
1414 // Force-print to std::cout so it's also outside the logcat.
1415 std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1416#endif
1417}
1418
Mathieu Chartiere401d142015-04-22 13:56:20 -07001419static void GetSetCharStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001420 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001421 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001422#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1423 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001424 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001425
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001426 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001427 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001428 static_cast<size_t>(values[i]),
1429 0U,
1430 StubTest::GetEntrypoint(self, kQuickSet16Static),
1431 self,
1432 referrer);
1433
Mathieu Chartierc7853442015-03-27 14:35:38 -07001434 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001435 0U, 0U,
1436 StubTest::GetEntrypoint(self, kQuickGetCharStatic),
1437 self,
1438 referrer);
1439
1440 EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
1441 }
1442#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001443 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001444 LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
1445 // Force-print to std::cout so it's also outside the logcat.
1446 std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1447#endif
1448}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001449static void GetSetShortStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001450 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001451 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001452#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1453 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001454 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001455
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001456 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001457 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001458 static_cast<size_t>(values[i]),
1459 0U,
1460 StubTest::GetEntrypoint(self, kQuickSet16Static),
1461 self,
1462 referrer);
1463
Mathieu Chartierc7853442015-03-27 14:35:38 -07001464 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001465 0U, 0U,
1466 StubTest::GetEntrypoint(self, kQuickGetShortStatic),
1467 self,
1468 referrer);
1469
1470 EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
1471 }
1472#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001473 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001474 LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
1475 // Force-print to std::cout so it's also outside the logcat.
1476 std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1477#endif
1478}
1479
Mathieu Chartierc7853442015-03-27 14:35:38 -07001480static void GetSetCharInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001481 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001482 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001483#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1484 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001485 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001486
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001487 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001488 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001489 reinterpret_cast<size_t>(obj->Get()),
1490 static_cast<size_t>(values[i]),
1491 StubTest::GetEntrypoint(self, kQuickSet16Instance),
1492 self,
1493 referrer);
1494
Mathieu Chartierc7853442015-03-27 14:35:38 -07001495 uint16_t res = f->GetChar(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001496 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001497 f->SetChar<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001498
Mathieu Chartierc7853442015-03-27 14:35:38 -07001499 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001500 reinterpret_cast<size_t>(obj->Get()),
1501 0U,
1502 StubTest::GetEntrypoint(self, kQuickGetCharInstance),
1503 self,
1504 referrer);
1505 EXPECT_EQ(res, static_cast<uint16_t>(res2));
1506 }
1507#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001508 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001509 LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
1510 // Force-print to std::cout so it's also outside the logcat.
1511 std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1512#endif
1513}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001514static void GetSetShortInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001515 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001516 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001517#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1518 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001519 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001520
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001521 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001522 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001523 reinterpret_cast<size_t>(obj->Get()),
1524 static_cast<size_t>(values[i]),
1525 StubTest::GetEntrypoint(self, kQuickSet16Instance),
1526 self,
1527 referrer);
1528
Mathieu Chartierc7853442015-03-27 14:35:38 -07001529 int16_t res = f->GetShort(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001530 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001531 f->SetShort<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001532
Mathieu Chartierc7853442015-03-27 14:35:38 -07001533 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001534 reinterpret_cast<size_t>(obj->Get()),
1535 0U,
1536 StubTest::GetEntrypoint(self, kQuickGetShortInstance),
1537 self,
1538 referrer);
1539 EXPECT_EQ(res, static_cast<int16_t>(res2));
1540 }
1541#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001542 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001543 LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
1544 // Force-print to std::cout so it's also outside the logcat.
1545 std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1546#endif
1547}
1548
Mathieu Chartiere401d142015-04-22 13:56:20 -07001549static void GetSet32Static(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001550 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001551 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001552#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1553 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001554 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001555
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001556 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001557 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001558 static_cast<size_t>(values[i]),
1559 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001560 StubTest::GetEntrypoint(self, kQuickSet32Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001561 self,
1562 referrer);
1563
Mathieu Chartierc7853442015-03-27 14:35:38 -07001564 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001565 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001566 StubTest::GetEntrypoint(self, kQuickGet32Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001567 self,
1568 referrer);
1569
Goran Jakovljevic04568812015-04-23 15:27:23 +02001570#if defined(__mips__) && defined(__LP64__)
1571 EXPECT_EQ(static_cast<uint32_t>(res), values[i]) << "Iteration " << i;
1572#else
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001573 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Goran Jakovljevic04568812015-04-23 15:27:23 +02001574#endif
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001575 }
1576#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001577 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001578 LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
1579 // Force-print to std::cout so it's also outside the logcat.
1580 std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
1581#endif
1582}
1583
1584
Mathieu Chartierc7853442015-03-27 14:35:38 -07001585static void GetSet32Instance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001586 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001587 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001588#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1589 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001590 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001591
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001592 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001593 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001594 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001595 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001596 StubTest::GetEntrypoint(self, kQuickSet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001597 self,
1598 referrer);
1599
Mathieu Chartierc7853442015-03-27 14:35:38 -07001600 int32_t res = f->GetInt(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001601 EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;
1602
1603 res++;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001604 f->SetInt<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001605
Mathieu Chartierc7853442015-03-27 14:35:38 -07001606 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001607 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001608 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001609 StubTest::GetEntrypoint(self, kQuickGet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001610 self,
1611 referrer);
1612 EXPECT_EQ(res, static_cast<int32_t>(res2));
1613 }
1614#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001615 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001616 LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
1617 // Force-print to std::cout so it's also outside the logcat.
1618 std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1619#endif
1620}
1621
1622
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001623#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1624 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001625
1626static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001627 ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001628 SHARED_REQUIRES(Locks::mutator_lock_) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001629 test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1630 reinterpret_cast<size_t>(val),
1631 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001632 StubTest::GetEntrypoint(self, kQuickSetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001633 self,
1634 referrer);
1635
1636 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1637 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001638 StubTest::GetEntrypoint(self, kQuickGetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001639 self,
1640 referrer);
1641
1642 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1643}
1644#endif
1645
Mathieu Chartiere401d142015-04-22 13:56:20 -07001646static void GetSetObjStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001647 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001648 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001649#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1650 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierc7853442015-03-27 14:35:38 -07001651 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001652
1653 // Allocate a string object for simplicity.
1654 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartierc7853442015-03-27 14:35:38 -07001655 set_and_check_static(f->GetDexFieldIndex(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001656
Mathieu Chartierc7853442015-03-27 14:35:38 -07001657 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001658#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001659 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001660 LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
1661 // Force-print to std::cout so it's also outside the logcat.
1662 std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
1663#endif
1664}
1665
1666
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001667#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1668 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierc7853442015-03-27 14:35:38 -07001669static void set_and_check_instance(ArtField* f, mirror::Object* trg,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001670 mirror::Object* val, Thread* self, ArtMethod* referrer,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001671 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001672 SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001673 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001674 reinterpret_cast<size_t>(trg),
1675 reinterpret_cast<size_t>(val),
Andreas Gampe29b38412014-08-13 00:15:43 -07001676 StubTest::GetEntrypoint(self, kQuickSetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001677 self,
1678 referrer);
1679
Mathieu Chartierc7853442015-03-27 14:35:38 -07001680 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001681 reinterpret_cast<size_t>(trg),
1682 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001683 StubTest::GetEntrypoint(self, kQuickGetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001684 self,
1685 referrer);
1686
1687 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1688
Mathieu Chartierc7853442015-03-27 14:35:38 -07001689 EXPECT_EQ(val, f->GetObj(trg));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001690}
1691#endif
1692
Mathieu Chartierc7853442015-03-27 14:35:38 -07001693static void GetSetObjInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001694 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001695 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001696#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1697 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001698 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001699
1700 // Allocate a string object for simplicity.
1701 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001702 set_and_check_instance(f, obj->Get(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001703
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001704 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001705#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001706 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001707 LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
1708 // Force-print to std::cout so it's also outside the logcat.
1709 std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
1710#endif
1711}
1712
1713
Calin Juravle872ab3f2015-10-02 07:27:51 +01001714// TODO: Complete these tests for 32b architectures
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001715
Mathieu Chartiere401d142015-04-22 13:56:20 -07001716static void GetSet64Static(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001717 StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001718 SHARED_REQUIRES(Locks::mutator_lock_) {
Calin Juravle6e399ac2015-10-02 23:56:06 +01001719#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) \
1720 || defined(__aarch64__)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001721 uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001722
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001723 for (size_t i = 0; i < arraysize(values); ++i) {
Calin Juravle6e399ac2015-10-02 23:56:06 +01001724 // 64 bit FieldSet stores the set value in the second register.
1725 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Calin Juravle24cc1b32015-10-06 11:46:58 +01001726 0U,
1727 values[i],
1728 StubTest::GetEntrypoint(self, kQuickSet64Static),
1729 self,
1730 referrer);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001731
Mathieu Chartierc7853442015-03-27 14:35:38 -07001732 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001733 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001734 StubTest::GetEntrypoint(self, kQuickGet64Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001735 self,
1736 referrer);
1737
1738 EXPECT_EQ(res, values[i]) << "Iteration " << i;
1739 }
1740#else
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001741 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001742 LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
1743 // Force-print to std::cout so it's also outside the logcat.
1744 std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
1745#endif
1746}
1747
1748
Mathieu Chartierc7853442015-03-27 14:35:38 -07001749static void GetSet64Instance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001750 Thread* self, ArtMethod* referrer, StubTest* test)
Mathieu Chartier90443472015-07-16 20:32:27 -07001751 SHARED_REQUIRES(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001752#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) || \
1753 defined(__aarch64__)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001754 uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001755
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001756 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001757 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001758 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001759 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001760 StubTest::GetEntrypoint(self, kQuickSet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001761 self,
1762 referrer);
1763
Mathieu Chartierc7853442015-03-27 14:35:38 -07001764 int64_t res = f->GetLong(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001765 EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;
1766
1767 res++;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001768 f->SetLong<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001769
Mathieu Chartierc7853442015-03-27 14:35:38 -07001770 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001771 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001772 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001773 StubTest::GetEntrypoint(self, kQuickGet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001774 self,
1775 referrer);
1776 EXPECT_EQ(res, static_cast<int64_t>(res2));
1777 }
1778#else
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001779 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001780 LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
1781 // Force-print to std::cout so it's also outside the logcat.
1782 std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1783#endif
1784}
1785
1786static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
1787 // garbage is created during ClassLinker::Init
1788
1789 JNIEnv* env = Thread::Current()->GetJniEnv();
1790 jclass jc = env->FindClass("AllFields");
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001791 CHECK(jc != nullptr);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001792 jobject o = env->AllocObject(jc);
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001793 CHECK(o != nullptr);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001794
1795 ScopedObjectAccess soa(self);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001796 StackHandleScope<3> hs(self);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001797 Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
1798 Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001799 // Need a method as a referrer
Andreas Gampe542451c2016-07-26 09:02:02 -07001800 ArtMethod* m = c->GetDirectMethod(0, kRuntimePointerSize);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001801
1802 // Play with it...
1803
1804 // Static fields.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001805 for (ArtField& f : c->GetSFields()) {
1806 Primitive::Type type = f.GetTypeAsPrimitiveType();
Mathieu Chartierc7853442015-03-27 14:35:38 -07001807 if (test_type != type) {
1808 continue;
1809 }
1810 switch (type) {
1811 case Primitive::Type::kPrimBoolean:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001812 GetSetBooleanStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001813 break;
1814 case Primitive::Type::kPrimByte:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001815 GetSetByteStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001816 break;
1817 case Primitive::Type::kPrimChar:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001818 GetSetCharStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001819 break;
1820 case Primitive::Type::kPrimShort:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001821 GetSetShortStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001822 break;
1823 case Primitive::Type::kPrimInt:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001824 GetSet32Static(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001825 break;
1826 case Primitive::Type::kPrimLong:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001827 GetSet64Static(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001828 break;
1829 case Primitive::Type::kPrimNot:
1830 // Don't try array.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001831 if (f.GetTypeDescriptor()[0] != '[') {
1832 GetSetObjStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001833 }
1834 break;
1835 default:
1836 break; // Skip.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001837 }
1838 }
1839
1840 // Instance fields.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001841 for (ArtField& f : c->GetIFields()) {
1842 Primitive::Type type = f.GetTypeAsPrimitiveType();
Mathieu Chartierc7853442015-03-27 14:35:38 -07001843 if (test_type != type) {
1844 continue;
1845 }
1846 switch (type) {
1847 case Primitive::Type::kPrimBoolean:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001848 GetSetBooleanInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001849 break;
1850 case Primitive::Type::kPrimByte:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001851 GetSetByteInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001852 break;
1853 case Primitive::Type::kPrimChar:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001854 GetSetCharInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001855 break;
1856 case Primitive::Type::kPrimShort:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001857 GetSetShortInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001858 break;
1859 case Primitive::Type::kPrimInt:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001860 GetSet32Instance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001861 break;
1862 case Primitive::Type::kPrimLong:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001863 GetSet64Instance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001864 break;
1865 case Primitive::Type::kPrimNot:
1866 // Don't try array.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001867 if (f.GetTypeDescriptor()[0] != '[') {
1868 GetSetObjInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001869 }
1870 break;
1871 default:
1872 break; // Skip.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001873 }
1874 }
1875
1876 // TODO: Deallocate things.
1877}
1878
Fred Shih37f05ef2014-07-16 18:38:08 -07001879TEST_F(StubTest, Fields8) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001880 Thread* self = Thread::Current();
1881
1882 self->TransitionFromSuspendedToRunnable();
1883 LoadDex("AllFields");
1884 bool started = runtime_->Start();
1885 CHECK(started);
1886
1887 TestFields(self, this, Primitive::Type::kPrimBoolean);
1888 TestFields(self, this, Primitive::Type::kPrimByte);
1889}
1890
1891TEST_F(StubTest, Fields16) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001892 Thread* self = Thread::Current();
1893
1894 self->TransitionFromSuspendedToRunnable();
1895 LoadDex("AllFields");
1896 bool started = runtime_->Start();
1897 CHECK(started);
1898
1899 TestFields(self, this, Primitive::Type::kPrimChar);
1900 TestFields(self, this, Primitive::Type::kPrimShort);
1901}
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001902
1903TEST_F(StubTest, Fields32) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001904 Thread* self = Thread::Current();
1905
1906 self->TransitionFromSuspendedToRunnable();
1907 LoadDex("AllFields");
1908 bool started = runtime_->Start();
1909 CHECK(started);
1910
1911 TestFields(self, this, Primitive::Type::kPrimInt);
1912}
1913
1914TEST_F(StubTest, FieldsObj) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001915 Thread* self = Thread::Current();
1916
1917 self->TransitionFromSuspendedToRunnable();
1918 LoadDex("AllFields");
1919 bool started = runtime_->Start();
1920 CHECK(started);
1921
1922 TestFields(self, this, Primitive::Type::kPrimNot);
1923}
1924
1925TEST_F(StubTest, Fields64) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001926 Thread* self = Thread::Current();
1927
1928 self->TransitionFromSuspendedToRunnable();
1929 LoadDex("AllFields");
1930 bool started = runtime_->Start();
1931 CHECK(started);
1932
1933 TestFields(self, this, Primitive::Type::kPrimLong);
1934}
1935
// Disabled, see b/27991555.
1937// FIXME: Hacking the entry point to point to art_quick_to_interpreter_bridge is broken.
1938// The bridge calls through to GetCalleeSaveMethodCaller() which looks up the pre-header
1939// and gets a bogus OatQuickMethodHeader* pointing into our assembly code just before
1940// the bridge and uses that to check for inlined frames, crashing in the process.
// Exercises (1) the IMT conflict trampoline, via a hand-built ImtConflictTable
// mapping List.contains -> ArrayList.contains, and (2) the
// invoke-interface-with-access-check trampoline, both against a real
// java.util.ArrayList instance.
TEST_F(StubTest, DISABLED_IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  // NOTE(review): only two handles are created in this test; a scope of 7
  // looks oversized (harmless) — confirm before shrinking.
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod = env->GetMethodID(
      arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get representation.
  ArtMethod* contains_amethod = soa.DecodeMethod(contains_jmethod);

  // Patch up ArrayList.contains: if it has no compiled code, route it through
  // the quick-to-interpreter bridge so the trampolines have something to call.
  // (See the FIXME above the test: this hack is what currently breaks it.)
  if (contains_amethod->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
  }

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod = env->GetMethodID(
      list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get mirror representation.
  ArtMethod* inf_contains = soa.DecodeMethod(inf_contains_jmethod);

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Create instances.

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object*>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(jobj)));

  // Invocation tests.

  // 1. imt_conflict

  // Contains.

  // We construct the ImtConflictTable ourselves, as we cannot go into the runtime stub
  // that will create it: the runtime stub expects to be called by compiled code.
  LinearAlloc* linear_alloc = Runtime::Current()->GetLinearAlloc();
  ArtMethod* conflict_method = Runtime::Current()->CreateImtConflictMethod(linear_alloc);
  ImtConflictTable* empty_conflict_table =
      Runtime::Current()->GetClassLinker()->CreateImtConflictTable(/*count*/0u, linear_alloc);
  void* data = linear_alloc->Alloc(
      self,
      ImtConflictTable::ComputeSizeWithOneMoreEntry(empty_conflict_table, kRuntimePointerSize));
  // Placement-new the one-entry table (interface method -> implementation)
  // into the linear-alloc'd storage.
  ImtConflictTable* new_table = new (data) ImtConflictTable(
      empty_conflict_table, inf_contains, contains_amethod, kRuntimePointerSize);
  conflict_method->SetImtConflictTable(new_table, kRuntimePointerSize);

  // Empty list: contains(obj) must be false. The hidden argument is the dex
  // method index of the interface method, as the trampoline expects.
  size_t result =
      Invoke3WithReferrerAndHidden(reinterpret_cast<size_t>(conflict_method),
                                   reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self,
                                   contains_amethod,
                                   static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException());

  // Contains: same call again, now expected to report true.

  result =
      Invoke3WithReferrerAndHidden(reinterpret_cast<size_t>(conflict_method),
                                   reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self,
                                   contains_amethod,
                                   static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // 2. regular interface trampoline

  // Element is present -> true.
  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(obj.Get()),
                               StubTest::GetEntrypoint(self,
                                                       kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // The list does not contain itself -> false.
  result = Invoke3WithReferrer(
      static_cast<size_t>(inf_contains->GetDexMethodIndex()),
      reinterpret_cast<size_t>(array_list.Get()), reinterpret_cast<size_t>(array_list.Get()),
      StubTest::GetEntrypoint(self, kQuickInvokeInterfaceTrampolineWithAccessCheck), self,
      contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
2079
// Exercises the quick kQuickIndexOf stub: for every (string, char, start)
// combination, compares the stub's result against String::FastIndexOf,
// which is treated as the reference implementation.
TEST_F(StubTest, StringIndexOf) {
#if defined(__arm__) || defined(__aarch64__) || defined(__mips__)
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  const char* c_str[] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
  static constexpr size_t kStringCount = arraysize(c_str);
  const char c_char[] = { 'a', 'b', 'c', 'd', 'e' };
  static constexpr size_t kCharCount = arraysize(c_char);

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
  }

  // Matrix of expectations. First component is first parameter. Note we only check against the
  // sign, not the value. As we are testing random offsets, we need to compute this and need to
  // rely on String::CompareTo being correct.
  static constexpr size_t kMaxLen = 9;
  DCHECK_LE(strlen(c_str[kStringCount-1]), kMaxLen) << "Please fix the indexof test.";

  // Last dimension: start, offset by 1. Start values therefore range from -1
  // (before the string) to kMaxLen + 1 (past the end), covering both
  // out-of-range directions.
  int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
      }
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kCharCount; ++y) {
      for (size_t z = 0; z <= kMaxLen + 2; ++z) {
        int32_t start = static_cast<int32_t>(z) - 1;

        // Invoke the indexOf stub with string x, char y, and this start offset.
        size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
                                StubTest::GetEntrypoint(self, kQuickIndexOf), self);

        EXPECT_FALSE(self->IsExceptionPending());

        // The result is a 32b signed integer returned in a native word; use a
        // union to reinterpret the low 32 bits without narrowing surprises.
        union {
          size_t r;
          int32_t i;
        } conv;
        conv.r = result;

        EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
            c_char[y] << " @ " << start;
      }
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
2153
Roland Levillain02b75802016-07-13 11:54:35 +01002154// TODO: Exercise the ReadBarrierMarkRegX entry points.
2155
Man Cao1aee9002015-07-14 22:31:42 -07002156TEST_F(StubTest, ReadBarrier) {
2157#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
2158 defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
2159 Thread* self = Thread::Current();
2160
2161 const uintptr_t readBarrierSlow = StubTest::GetEntrypoint(self, kQuickReadBarrierSlow);
2162
2163 // Create an object
2164 ScopedObjectAccess soa(self);
2165 // garbage is created during ClassLinker::Init
2166
2167 StackHandleScope<2> hs(soa.Self());
2168 Handle<mirror::Class> c(
2169 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
2170
2171 // Build an object instance
2172 Handle<mirror::Object> obj(hs.NewHandle(c->AllocObject(soa.Self())));
2173
2174 EXPECT_FALSE(self->IsExceptionPending());
2175
2176 size_t result = Invoke3(0U, reinterpret_cast<size_t>(obj.Get()),
2177 mirror::Object::ClassOffset().SizeValue(), readBarrierSlow, self);
2178
2179 EXPECT_FALSE(self->IsExceptionPending());
2180 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
2181 mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
2182 EXPECT_EQ(klass, obj->GetClass());
2183
2184 // Tests done.
2185#else
2186 LOG(INFO) << "Skipping read_barrier_slow";
2187 // Force-print to std::cout so it's also outside the logcat.
2188 std::cout << "Skipping read_barrier_slow" << std::endl;
2189#endif
2190}
2191
Roland Levillain0d5a2812015-11-13 10:07:31 +00002192TEST_F(StubTest, ReadBarrierForRoot) {
2193#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
2194 defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
2195 Thread* self = Thread::Current();
2196
2197 const uintptr_t readBarrierForRootSlow =
2198 StubTest::GetEntrypoint(self, kQuickReadBarrierForRootSlow);
2199
2200 // Create an object
2201 ScopedObjectAccess soa(self);
2202 // garbage is created during ClassLinker::Init
2203
2204 StackHandleScope<1> hs(soa.Self());
2205
2206 Handle<mirror::String> obj(
2207 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
2208
2209 EXPECT_FALSE(self->IsExceptionPending());
2210
2211 GcRoot<mirror::Class>& root = mirror::String::java_lang_String_;
2212 size_t result = Invoke3(reinterpret_cast<size_t>(&root), 0U, 0U, readBarrierForRootSlow, self);
2213
2214 EXPECT_FALSE(self->IsExceptionPending());
2215 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
2216 mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
2217 EXPECT_EQ(klass, obj->GetClass());
2218
2219 // Tests done.
2220#else
2221 LOG(INFO) << "Skipping read_barrier_for_root_slow";
2222 // Force-print to std::cout so it's also outside the logcat.
2223 std::cout << "Skipping read_barrier_for_root_slow" << std::endl;
2224#endif
2225}
2226
Andreas Gampe525cde22014-04-22 15:44:50 -07002227} // namespace art