blob: 393dfe6d194ba6a124a55855a9224062a1cb7d02 [file] [log] [blame]
Andreas Gampe525cde22014-04-22 15:44:50 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Ian Rogerse63db272014-07-15 15:36:11 -070017#include <cstdio>
18
Mathieu Chartierc7853442015-03-27 14:35:38 -070019#include "art_field-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070020#include "art_method-inl.h"
Andreas Gampe542451c2016-07-26 09:02:02 -070021#include "base/enums.h"
Vladimir Marko3481ba22015-04-13 12:22:36 +010022#include "class_linker-inl.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070023#include "common_runtime_test.h"
Andreas Gampe29b38412014-08-13 00:15:43 -070024#include "entrypoints/quick/quick_entrypoints_enum.h"
Andreas Gampe75a7db62016-09-26 12:04:26 -070025#include "imt_conflict_table.h"
Andreas Gampe13b27842016-11-07 16:48:23 -080026#include "jni_internal.h"
Nicolas Geoffray1004faa2016-03-23 14:28:30 +000027#include "linear_alloc.h"
Andreas Gampe51f76352014-05-21 08:28:48 -070028#include "mirror/class-inl.h"
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -070029#include "mirror/string-inl.h"
Mathieu Chartier0795f232016-09-27 18:43:30 -070030#include "scoped_thread_state_change-inl.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070031
32namespace art {
33
34
// Test fixture that invokes quick-code entrypoint stubs directly via per-architecture
// inline-assembly trampolines. Each trampoline marshals up to three plain arguments,
// the stub address, the Thread*, and an optional "hidden" argument into the registers
// the quick calling convention expects, then calls the stub.
class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      // Install a callee-save method for every CalleeSaveType that does not have one yet.
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(), type);
        }
      }
    }
  }

  // Shrinks the heap option to 4M and forces interpreter-only mode (-Xint) so the
  // runtime under test stays small and no compiled code interferes with the stubs.
  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  // Invokes the stub at |code| with three arguments and no referrer / hidden argument.
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             ArtMethod* referrer) {
    return Invoke3WithReferrerAndHidden(arg0, arg1, arg2, code, self, referrer, 0);
  }

  // TODO: Set up a frame according to referrer's specs.
  // Core trampoline: pushes a managed-stack fragment, stores |referrer| as the top-of-frame
  // method slot, loads arg0-arg2/|code|/|self|/|hidden| into the architecture's expected
  // registers, and calls the stub. Returns the stub's integer result.
  // NOTE(review): |hidden| appears to be the extra argument some stubs take in a dedicated
  // register (xmm7 on x86, r12 on arm, x17 on arm64 per the loads below) — confirm against
  // the stub implementations.
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
#define PUSH(reg) "push " # reg "\n\t .cfi_adjust_cfa_offset 4\n\t"
#define POP(reg) "pop " # reg "\n\t .cfi_adjust_cfa_offset -4\n\t"
    __asm__ __volatile__(
        "movd %[hidden], %%xmm7\n\t"  // This is a memory op, so do this early. If it is off of
                                      // esp, then we won't be able to access it after spilling.

        // Spill 6 registers.
        PUSH(%%ebx)
        PUSH(%%ecx)
        PUSH(%%edx)
        PUSH(%%esi)
        PUSH(%%edi)
        PUSH(%%ebp)

        // Store the inputs to the stack, but keep the referrer up top, less work.
        PUSH(%[referrer])     // Align stack.
        PUSH(%[referrer])     // Store referrer

        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[code])
        // Now read them back into the required registers.
        POP(%%edi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%eax)
        // Call is prepared now.

        "call *%%edi\n\t"           // Call the stub
        "addl $8, %%esp\n\t"        // Pop referrer and padding.
        ".cfi_adjust_cfa_offset -8\n\t"

        // Restore 6 registers.
        POP(%%ebp)
        POP(%%edi)
        POP(%%esi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%ebx)

        : "=a" (result)
          // Use the result from eax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer]"r"(referrer), [hidden]"m"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory", "xmm7");  // clobber.
#undef PUSH
#undef POP
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"   // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop null and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "r0", "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #80\n\t"
        ".cfi_adjust_cfa_offset 80\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"
        // To be extra defensive, store x20. We do this because some of the stubs might make a
        // transition into the runtime via the blr instruction below and *not* save x20.
        "str x20, [sp, #64]\n\t"
        // 8 byte buffer

        "sub sp, sp, #16\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"    // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x19, x17, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"              // Call the stub
        "mov x8, x0\n\t"          // Store result
        "add sp, sp, #16\n\t"     // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        // The callee-saved FP registers were seeded with known values above; if any of them
        // changed across the call, the mismatch falls through to label 1 and sets the flag.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"          // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"    // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "ldr x20, [sp, #64]\n\t"
        "add sp, sp, #80\n\t"     // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -80\n\t"

        "str x9, %[fpr_result]\n\t"  // Store the FPR comparison result
        "mov %[result], x8\n\t"      // Store the call result

        "b 3f\n\t"                // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
          // Leave one register unclobbered, which is needed for compiling with
          // -fstack-protector-strong. According to AAPCS64 registers x9-x15 are caller-saved,
          // which means we should unclobber one of the callee-saved registers that are unused.
          // Here we use x20.
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");
#elif defined(__mips__) && !defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a3 and t0-t7 which we say we don't clobber. May contain args.
        "addiu $sp, $sp, -64\n\t"
        "sw $a0, 0($sp)\n\t"
        "sw $a1, 4($sp)\n\t"
        "sw $a2, 8($sp)\n\t"
        "sw $a3, 12($sp)\n\t"
        "sw $t0, 16($sp)\n\t"
        "sw $t1, 20($sp)\n\t"
        "sw $t2, 24($sp)\n\t"
        "sw $t3, 28($sp)\n\t"
        "sw $t4, 32($sp)\n\t"
        "sw $t5, 36($sp)\n\t"
        "sw $t6, 40($sp)\n\t"
        "sw $t7, 44($sp)\n\t"
        // Spill gp register since it is caller save.
        "sw $gp, 52($sp)\n\t"

        "addiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sw %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "addiu $sp, $sp, -24\n\t"
        "sw %[arg0], 0($sp)\n\t"
        "sw %[arg1], 4($sp)\n\t"
        "sw %[arg2], 8($sp)\n\t"
        "sw %[code], 12($sp)\n\t"
        "sw %[self], 16($sp)\n\t"
        "sw %[hidden], 20($sp)\n\t"

        // Load call params into the right registers.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $t9, 12($sp)\n\t"
        "lw $s1, 16($sp)\n\t"
        "lw $t7, 20($sp)\n\t"
        "addiu $sp, $sp, 24\n\t"

        "jalr $t9\n\t"             // Call the stub.
        "nop\n\t"
        "addiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $a3, 12($sp)\n\t"
        "lw $t0, 16($sp)\n\t"
        "lw $t1, 20($sp)\n\t"
        "lw $t2, 24($sp)\n\t"
        "lw $t3, 28($sp)\n\t"
        "lw $t4, 32($sp)\n\t"
        "lw $t5, 36($sp)\n\t"
        "lw $t6, 40($sp)\n\t"
        "lw $t7, 44($sp)\n\t"
        // Restore gp.
        "lw $gp, 52($sp)\n\t"
        "addiu $sp, $sp, 64\n\t"   // Free stack space, now sp as on entry.

        "move %[result], $v0\n\t"  // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "at", "v0", "v1", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "t8", "t9", "k0", "k1",
          "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__mips__) && defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a7 which we say we don't clobber. May contain args.
        "daddiu $sp, $sp, -64\n\t"
        "sd $a0, 0($sp)\n\t"
        "sd $a1, 8($sp)\n\t"
        "sd $a2, 16($sp)\n\t"
        "sd $a3, 24($sp)\n\t"
        "sd $a4, 32($sp)\n\t"
        "sd $a5, 40($sp)\n\t"
        "sd $a6, 48($sp)\n\t"
        "sd $a7, 56($sp)\n\t"

        "daddiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sd %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "daddiu $sp, $sp, -48\n\t"
        "sd %[arg0], 0($sp)\n\t"
        "sd %[arg1], 8($sp)\n\t"
        "sd %[arg2], 16($sp)\n\t"
        "sd %[code], 24($sp)\n\t"
        "sd %[self], 32($sp)\n\t"
        "sd %[hidden], 40($sp)\n\t"

        // Load call params into the right registers.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $t9, 24($sp)\n\t"
        "ld $s1, 32($sp)\n\t"
        "ld $t0, 40($sp)\n\t"
        "daddiu $sp, $sp, 48\n\t"

        "jalr $t9\n\t"              // Call the stub.
        "nop\n\t"
        "daddiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $a3, 24($sp)\n\t"
        "ld $a4, 32($sp)\n\t"
        "ld $a5, 40($sp)\n\t"
        "ld $a6, 48($sp)\n\t"
        "ld $a7, 56($sp)\n\t"
        "daddiu $sp, $sp, 64\n\t"

        "move %[result], $v0\n\t"   // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        // Instead aliases t0-t3, register names $12-$15 has been used in the clobber list because
        // t0-t3 are ambiguous.
        : "at", "v0", "v1", "$12", "$13", "$14", "$15", "s0", "s1", "s2", "s3", "s4", "s5", "s6",
          "s7", "t8", "t9", "k0", "k1", "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__)
#define PUSH(reg) "pushq " # reg "\n\t .cfi_adjust_cfa_offset 8\n\t"
#define POP(reg) "popq " # reg "\n\t .cfi_adjust_cfa_offset -8\n\t"
    // Note: Uses the native convention. We do a callee-save regimen by manually spilling and
    // restoring almost all registers.
    // TODO: Set the thread?
    __asm__ __volatile__(
        // Spill almost everything (except rax, rsp). 14 registers.
        PUSH(%%rbx)
        PUSH(%%rcx)
        PUSH(%%rdx)
        PUSH(%%rsi)
        PUSH(%%rdi)
        PUSH(%%rbp)
        PUSH(%%r8)
        PUSH(%%r9)
        PUSH(%%r10)
        PUSH(%%r11)
        PUSH(%%r12)
        PUSH(%%r13)
        PUSH(%%r14)
        PUSH(%%r15)

        PUSH(%[referrer])              // Push referrer & 16B alignment padding
        PUSH(%[referrer])

        // Now juggle the input registers.
        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[hidden])
        PUSH(%[code])
        POP(%%r8)
        POP(%%rax)
        POP(%%rdx)
        POP(%%rsi)
        POP(%%rdi)

        "call *%%r8\n\t"              // Call the stub
        "addq $16, %%rsp\n\t"         // Pop null and padding
        ".cfi_adjust_cfa_offset -16\n\t"

        POP(%%r15)
        POP(%%r14)
        POP(%%r13)
        POP(%%r12)
        POP(%%r11)
        POP(%%r10)
        POP(%%r9)
        POP(%%r8)
        POP(%%rbp)
        POP(%%rdi)
        POP(%%rsi)
        POP(%%rdx)
        POP(%%rcx)
        POP(%%rbx)

        : "=a" (result)
          // Use the result from rax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into some other
          // register. We can't use "b" (rbx), as ASAN uses this for the frame pointer.
        : "memory");  // We spill and restore (almost) all registers, so only mention memory here.
#undef PUSH
#undef POP
#else
    UNUSED(arg0, arg1, arg2, code, referrer, hidden);
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    // On arm64 the asm above records whether callee-saved d8-d15 survived the call;
    // on other architectures fpr_result stays 0.
    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Reads the raw entrypoint pointer for |entrypoint| out of |self|'s entrypoint table,
  // by offsetting from the Thread object itself.
  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
    int32_t offset;
    offset = GetThreadOffset<kRuntimePointerSize>(entrypoint).Int32Value();
    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
  }

 protected:
  // Last FPR-verification result from Invoke3WithReferrerAndHidden (0 == all preserved).
  size_t fp_result;
};
542
543
Andreas Gampe525cde22014-04-22 15:44:50 -0700544TEST_F(StubTest, Memcpy) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200545#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__)) || defined(__mips__)
Andreas Gampe525cde22014-04-22 15:44:50 -0700546 Thread* self = Thread::Current();
547
548 uint32_t orig[20];
549 uint32_t trg[20];
550 for (size_t i = 0; i < 20; ++i) {
551 orig[i] = i;
552 trg[i] = 0;
553 }
554
555 Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
Andreas Gampe29b38412014-08-13 00:15:43 -0700556 10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700557
558 EXPECT_EQ(orig[0], trg[0]);
559
560 for (size_t i = 1; i < 4; ++i) {
561 EXPECT_NE(orig[i], trg[i]);
562 }
563
564 for (size_t i = 4; i < 14; ++i) {
565 EXPECT_EQ(orig[i], trg[i]);
566 }
567
568 for (size_t i = 14; i < 20; ++i) {
569 EXPECT_NE(orig[i], trg[i]);
570 }
571
572 // TODO: Test overlapping?
573
574#else
575 LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
576 // Force-print to std::cout so it's also outside the logcat.
577 std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
578#endif
579}
580
Andreas Gampe525cde22014-04-22 15:44:50 -0700581TEST_F(StubTest, LockObject) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200582#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
583 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700584 static constexpr size_t kThinLockLoops = 100;
585
Andreas Gampe525cde22014-04-22 15:44:50 -0700586 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700587
588 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
589
Andreas Gampe525cde22014-04-22 15:44:50 -0700590 // Create an object
591 ScopedObjectAccess soa(self);
592 // garbage is created during ClassLinker::Init
593
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700594 StackHandleScope<2> hs(soa.Self());
595 Handle<mirror::String> obj(
596 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700597 LockWord lock = obj->GetLockWord(false);
598 LockWord::LockState old_state = lock.GetState();
599 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
600
Andreas Gampe29b38412014-08-13 00:15:43 -0700601 Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700602
603 LockWord lock_after = obj->GetLockWord(false);
604 LockWord::LockState new_state = lock_after.GetState();
605 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700606 EXPECT_EQ(lock_after.ThinLockCount(), 0U); // Thin lock starts count at zero
607
608 for (size_t i = 1; i < kThinLockLoops; ++i) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700609 Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700610
611 // Check we're at lock count i
612
613 LockWord l_inc = obj->GetLockWord(false);
614 LockWord::LockState l_inc_state = l_inc.GetState();
615 EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
616 EXPECT_EQ(l_inc.ThinLockCount(), i);
617 }
618
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700619 // Force a fat lock by running identity hashcode to fill up lock word.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700620 Handle<mirror::String> obj2(hs.NewHandle(
621 mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700622
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700623 obj2->IdentityHashCode();
624
Andreas Gampe29b38412014-08-13 00:15:43 -0700625 Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700626
627 LockWord lock_after2 = obj2->GetLockWord(false);
628 LockWord::LockState new_state2 = lock_after2.GetState();
629 EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
630 EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));
631
632 // Test done.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700633#else
634 LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
635 // Force-print to std::cout so it's also outside the logcat.
636 std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
637#endif
638}
639
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700640
// Tiny deterministic pseudo-random sequence generator used to drive the lock
// stress test. Same seed => same sequence; state is kept in val_.
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  // Advance the state and return the new value. The update multiplies the
  // current state by 48271 (with uint32_t wrap-around), reduces it modulo
  // 2147483647, and offsets by 13.
  uint32_t next() {
    const uint32_t scaled = val_ * 48271u;
    val_ = scaled % 2147483647u + 13u;
    return val_;
  }

  uint32_t val_;
};
652
653
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700654// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
655static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200656#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
657 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700658 static constexpr size_t kThinLockLoops = 100;
659
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700660 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700661
662 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
663 const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700664 // Create an object
665 ScopedObjectAccess soa(self);
666 // garbage is created during ClassLinker::Init
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700667 static constexpr size_t kNumberOfLocks = 10; // Number of objects = lock
668 StackHandleScope<kNumberOfLocks + 1> hs(self);
669 Handle<mirror::String> obj(
670 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700671 LockWord lock = obj->GetLockWord(false);
672 LockWord::LockState old_state = lock.GetState();
673 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
674
Andreas Gampe29b38412014-08-13 00:15:43 -0700675 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700676 // This should be an illegal monitor state.
677 EXPECT_TRUE(self->IsExceptionPending());
678 self->ClearException();
679
680 LockWord lock_after = obj->GetLockWord(false);
681 LockWord::LockState new_state = lock_after.GetState();
682 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);
Andreas Gampe525cde22014-04-22 15:44:50 -0700683
Andreas Gampe29b38412014-08-13 00:15:43 -0700684 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700685
686 LockWord lock_after2 = obj->GetLockWord(false);
687 LockWord::LockState new_state2 = lock_after2.GetState();
688 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);
689
Andreas Gampe29b38412014-08-13 00:15:43 -0700690 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700691
692 LockWord lock_after3 = obj->GetLockWord(false);
693 LockWord::LockState new_state3 = lock_after3.GetState();
694 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);
695
696 // Stress test:
697 // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
698 // each step.
699
700 RandGen r(0x1234);
701
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700702 constexpr size_t kIterations = 10000; // Number of iterations
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700703 constexpr size_t kMoveToFat = 1000; // Chance of 1:kMoveFat to make a lock fat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700704
705 size_t counts[kNumberOfLocks];
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700706 bool fat[kNumberOfLocks]; // Whether a lock should be thin or fat.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700707 Handle<mirror::String> objects[kNumberOfLocks];
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700708
709 // Initialize = allocate.
710 for (size_t i = 0; i < kNumberOfLocks; ++i) {
711 counts[i] = 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700712 fat[i] = false;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700713 objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700714 }
715
716 for (size_t i = 0; i < kIterations; ++i) {
717 // Select which lock to update.
718 size_t index = r.next() % kNumberOfLocks;
719
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700720 // Make lock fat?
721 if (!fat[index] && (r.next() % kMoveToFat == 0)) {
722 fat[index] = true;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700723 objects[index]->IdentityHashCode();
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700724
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700725 LockWord lock_iter = objects[index]->GetLockWord(false);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700726 LockWord::LockState iter_state = lock_iter.GetState();
727 if (counts[index] == 0) {
728 EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
729 } else {
730 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
731 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700732 } else {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800733 bool take_lock; // Whether to lock or unlock in this step.
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700734 if (counts[index] == 0) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800735 take_lock = true;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700736 } else if (counts[index] == kThinLockLoops) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800737 take_lock = false;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700738 } else {
739 // Randomly.
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800740 take_lock = r.next() % 2 == 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700741 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700742
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800743 if (take_lock) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700744 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
745 self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700746 counts[index]++;
747 } else {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700748 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700749 art_quick_unlock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700750 counts[index]--;
751 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700752
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700753 EXPECT_FALSE(self->IsExceptionPending());
754
755 // Check the new state.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700756 LockWord lock_iter = objects[index]->GetLockWord(true);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700757 LockWord::LockState iter_state = lock_iter.GetState();
758 if (fat[index]) {
759 // Abuse MonitorInfo.
760 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700761 MonitorInfo info(objects[index].Get());
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700762 EXPECT_EQ(counts[index], info.entry_count_) << index;
763 } else {
764 if (counts[index] > 0) {
765 EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
766 EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
767 } else {
768 EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
769 }
770 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700771 }
772 }
773
774 // Unlock the remaining count times and then check it's unlocked. Then deallocate.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700775 // Go reverse order to correctly handle Handles.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700776 for (size_t i = 0; i < kNumberOfLocks; ++i) {
777 size_t index = kNumberOfLocks - 1 - i;
778 size_t count = counts[index];
779 while (count > 0) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700780 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
781 self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700782 count--;
783 }
784
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700785 LockWord lock_after4 = objects[index]->GetLockWord(false);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700786 LockWord::LockState new_state4 = lock_after4.GetState();
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700787 EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
788 || LockWord::LockState::kFatLocked == new_state4);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700789 }
790
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700791 // Test done.
Andreas Gampe525cde22014-04-22 15:44:50 -0700792#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -0800793 UNUSED(test);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700794 LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
Andreas Gampe525cde22014-04-22 15:44:50 -0700795 // Force-print to std::cout so it's also outside the logcat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700796 std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe525cde22014-04-22 15:44:50 -0700797#endif
798}
799
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700800TEST_F(StubTest, UnlockObject) {
Andreas Gampe369810a2015-01-14 19:53:31 -0800801 // This will lead to monitor error messages in the log.
802 ScopedLogSeverity sls(LogSeverity::FATAL);
803
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700804 TestUnlockObject(this);
805}
Andreas Gampe525cde22014-04-22 15:44:50 -0700806
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
// Forward declaration of the assembly stub. The symbol only exists on the
// architectures guarded above, where the quick entrypoints are implemented.
extern "C" void art_quick_check_instance_of(void);
#endif
811
812TEST_F(StubTest, CheckCast) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200813#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
814 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700815 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700816
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800817 const uintptr_t art_quick_check_instance_of =
818 StubTest::GetEntrypoint(self, kQuickCheckInstanceOf);
Andreas Gampe29b38412014-08-13 00:15:43 -0700819
Andreas Gampe525cde22014-04-22 15:44:50 -0700820 // Find some classes.
821 ScopedObjectAccess soa(self);
822 // garbage is created during ClassLinker::Init
823
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800824 VariableSizedHandleScope hs(soa.Self());
825 Handle<mirror::Class> klass_obj(
826 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
827 Handle<mirror::Class> klass_str(
828 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/String;")));
829 Handle<mirror::Class> klass_list(
830 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/util/List;")));
831 Handle<mirror::Class> klass_cloneable(
832 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Cloneable;")));
833 Handle<mirror::Class> klass_array_list(
834 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/util/ArrayList;")));
835 Handle<mirror::Object> obj(hs.NewHandle(klass_obj->AllocObject(soa.Self())));
836 Handle<mirror::String> string(hs.NewHandle(
837 mirror::String::AllocFromModifiedUtf8(soa.Self(), "ABCD")));
838 Handle<mirror::Object> array_list(hs.NewHandle(klass_array_list->AllocObject(soa.Self())));
Andreas Gampe525cde22014-04-22 15:44:50 -0700839
840 EXPECT_FALSE(self->IsExceptionPending());
841
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800842 Invoke3(reinterpret_cast<size_t>(obj.Get()),
843 reinterpret_cast<size_t>(klass_obj.Get()),
Mathieu Chartier2ecfd272016-11-01 10:10:05 -0700844 0U,
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800845 art_quick_check_instance_of,
Mathieu Chartier2ecfd272016-11-01 10:10:05 -0700846 self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700847 EXPECT_FALSE(self->IsExceptionPending());
848
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800849 // Expected true: Test string instance of java.lang.String.
850 Invoke3(reinterpret_cast<size_t>(string.Get()),
851 reinterpret_cast<size_t>(klass_str.Get()),
Mathieu Chartier2ecfd272016-11-01 10:10:05 -0700852 0U,
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800853 art_quick_check_instance_of,
Mathieu Chartier2ecfd272016-11-01 10:10:05 -0700854 self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700855 EXPECT_FALSE(self->IsExceptionPending());
856
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800857 // Expected true: Test string instance of java.lang.Object.
858 Invoke3(reinterpret_cast<size_t>(string.Get()),
859 reinterpret_cast<size_t>(klass_obj.Get()),
Mathieu Chartier2ecfd272016-11-01 10:10:05 -0700860 0U,
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800861 art_quick_check_instance_of,
Mathieu Chartier2ecfd272016-11-01 10:10:05 -0700862 self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700863 EXPECT_FALSE(self->IsExceptionPending());
864
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800865 // Expected false: Test object instance of java.lang.String.
866 Invoke3(reinterpret_cast<size_t>(obj.Get()),
867 reinterpret_cast<size_t>(klass_str.Get()),
Mathieu Chartier2ecfd272016-11-01 10:10:05 -0700868 0U,
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800869 art_quick_check_instance_of,
Mathieu Chartier2ecfd272016-11-01 10:10:05 -0700870 self);
871 EXPECT_TRUE(self->IsExceptionPending());
872 self->ClearException();
873
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800874 Invoke3(reinterpret_cast<size_t>(array_list.Get()),
875 reinterpret_cast<size_t>(klass_list.Get()),
Mathieu Chartier2ecfd272016-11-01 10:10:05 -0700876 0U,
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800877 art_quick_check_instance_of,
878 self);
879 EXPECT_FALSE(self->IsExceptionPending());
880
881 Invoke3(reinterpret_cast<size_t>(array_list.Get()),
882 reinterpret_cast<size_t>(klass_cloneable.Get()),
883 0U,
884 art_quick_check_instance_of,
885 self);
886 EXPECT_FALSE(self->IsExceptionPending());
887
888 Invoke3(reinterpret_cast<size_t>(string.Get()),
889 reinterpret_cast<size_t>(klass_array_list.Get()),
890 0U,
891 art_quick_check_instance_of,
892 self);
893 EXPECT_TRUE(self->IsExceptionPending());
894 self->ClearException();
895
896 Invoke3(reinterpret_cast<size_t>(string.Get()),
897 reinterpret_cast<size_t>(klass_cloneable.Get()),
898 0U,
899 art_quick_check_instance_of,
Mathieu Chartier2ecfd272016-11-01 10:10:05 -0700900 self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700901 EXPECT_TRUE(self->IsExceptionPending());
902 self->ClearException();
903
904#else
905 LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
906 // Force-print to std::cout so it's also outside the logcat.
907 std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
908#endif
909}
910
TEST_F(StubTest, AllocObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Exercises the resolved object-allocation entrypoints (WithChecks /
  // Resolved / Initialized) on the success path, then drives the heap to
  // exhaustion to verify the OOM failure path.
  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Two handles: the Object class here, and the Object[] class in the OOM
  // section below.
  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // Allocate via the "with checks" stub; the class is resolved so this is
    // expected to succeed and return a fresh instance of |c|.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectWithChecks),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // Same allocation through the "resolved" fast-path stub.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // Same allocation through the "initialized" fast-path stub.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K
    size_t length = 128 * KB / 4;
    while (length > 10) {
      // Keep allocating object arrays, shrinking the requested size whenever
      // an allocation fails, until the heap is (nearly) full.
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h.Get() == nullptr) {
        self->ClearException();

        // Try a smaller length
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h.Get() != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    // With the heap exhausted, the stub must fail: null result plus a pending
    // exception (expected to be an OOME).
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1027
TEST_F(StubTest, AllocObjectArray) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Exercises the resolved array-allocation entrypoint: a successful
  // allocation and an oversized request that must raise OOME.
  // TODO: Check the "Unresolved" allocation stubs

  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(self);
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

  // Needed to have a linked method.
  Handle<mirror::Class> c_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());

  // For some reason this does not work, as the type_idx is artificial and outside what the
  // resolved types of c_obj allow...

  // Deliberately disabled (see comment above); kept for documentation of the
  // type_idx-based calling convention.
  if ((false)) {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(
        static_cast<size_t>(c->GetDexTypeIndex().index_),    // type_idx
        10U,
        // arbitrary
        reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0, kRuntimePointerSize)),
        StubTest::GetEntrypoint(self, kQuickAllocArray),
        self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    EXPECT_EQ(obj->GetLength(), 10);
  }

  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 10U,
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << mirror::Object::PrettyTypeOf(self->GetException());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory.
  {
    // A GB-element object array cannot be satisfied: expect null plus a
    // pending exception.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()),
                            GB,  // that should fail...
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1114
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001115
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001116TEST_F(StubTest, StringCompareTo) {
jessicahandojo3aaa37b2016-07-29 14:46:37 -07001117 TEST_DISABLED_FOR_STRING_COMPRESSION();
Scott Wakelingc25cbf12016-04-18 09:00:11 +01001118 // There is no StringCompareTo runtime entrypoint for __arm__ or __aarch64__.
1119#if defined(__i386__) || defined(__mips__) || \
1120 (defined(__x86_64__) && !defined(__APPLE__))
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001121 // TODO: Check the "Unresolved" allocation stubs
1122
1123 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -07001124
1125 const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);
1126
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001127 ScopedObjectAccess soa(self);
1128 // garbage is created during ClassLinker::Init
1129
1130 // Create some strings
1131 // Use array so we can index into it and use a matrix for expected results
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001132 // Setup: The first half is standard. The second half uses a non-zero offset.
1133 // TODO: Shared backing arrays.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001134 const char* c[] = { "", "", "a", "aa", "ab",
Serban Constantinescu86797a72014-06-19 16:17:56 +01001135 "aacaacaacaacaacaac", // This one's under the default limit to go to __memcmp16.
1136 "aacaacaacaacaacaacaacaacaacaacaacaac", // This one's over.
1137 "aacaacaacaacaacaacaacaacaacaacaacaaca" }; // As is this one. We need a separate one to
1138 // defeat object-equal optimizations.
Jeff Hao848f70a2014-01-15 13:49:50 -08001139 static constexpr size_t kStringCount = arraysize(c);
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001140
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001141 StackHandleScope<kStringCount> hs(self);
1142 Handle<mirror::String> s[kStringCount];
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001143
Jeff Hao848f70a2014-01-15 13:49:50 -08001144 for (size_t i = 0; i < kStringCount; ++i) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001145 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001146 }
1147
1148 // TODO: wide characters
1149
1150 // Matrix of expectations. First component is first parameter. Note we only check against the
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001151 // sign, not the value. As we are testing random offsets, we need to compute this and need to
1152 // rely on String::CompareTo being correct.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001153 int32_t expected[kStringCount][kStringCount];
1154 for (size_t x = 0; x < kStringCount; ++x) {
1155 for (size_t y = 0; y < kStringCount; ++y) {
1156 expected[x][y] = s[x]->CompareTo(s[y].Get());
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001157 }
1158 }
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001159
1160 // Play with it...
1161
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001162 for (size_t x = 0; x < kStringCount; ++x) {
1163 for (size_t y = 0; y < kStringCount; ++y) {
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001164 // Test string_compareto x y
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001165 size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
1166 reinterpret_cast<size_t>(s[y].Get()), 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001167 art_quick_string_compareto, self);
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001168
1169 EXPECT_FALSE(self->IsExceptionPending());
1170
1171 // The result is a 32b signed integer
1172 union {
1173 size_t r;
1174 int32_t i;
1175 } conv;
1176 conv.r = result;
1177 int32_t e = expected[x][y];
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -07001178 EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1179 conv.r;
1180 EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1181 conv.r;
1182 EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
1183 conv.r;
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001184 }
1185 }
1186
Andreas Gampe7177d7c2014-05-02 12:10:02 -07001187 // TODO: Deallocate things.
1188
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001189 // Tests done.
1190#else
1191 LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
1192 // Force-print to std::cout so it's also outside the logcat.
1193 std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
1194 std::endl;
1195#endif
1196}
1197
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001198
Mathieu Chartierc7853442015-03-27 14:35:38 -07001199static void GetSetBooleanStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001200 ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001201 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001202#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1203 (defined(__x86_64__) && !defined(__APPLE__))
Fred Shih37f05ef2014-07-16 18:38:08 -07001204 constexpr size_t num_values = 5;
1205 uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };
1206
1207 for (size_t i = 0; i < num_values; ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001208 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001209 static_cast<size_t>(values[i]),
1210 0U,
1211 StubTest::GetEntrypoint(self, kQuickSet8Static),
1212 self,
1213 referrer);
1214
Mathieu Chartierc7853442015-03-27 14:35:38 -07001215 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001216 0U, 0U,
1217 StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
1218 self,
1219 referrer);
1220 // Boolean currently stores bools as uint8_t, be more zealous about asserting correct writes/gets.
1221 EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
1222 }
1223#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001224 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001225 LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
1226 // Force-print to std::cout so it's also outside the logcat.
1227 std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1228#endif
1229}
Mathieu Chartiere401d142015-04-22 13:56:20 -07001230static void GetSetByteStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Mathieu Chartierc7853442015-03-27 14:35:38 -07001231 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001232 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001233#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1234 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001235 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001236
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001237 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001238 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001239 static_cast<size_t>(values[i]),
1240 0U,
1241 StubTest::GetEntrypoint(self, kQuickSet8Static),
1242 self,
1243 referrer);
1244
Mathieu Chartierc7853442015-03-27 14:35:38 -07001245 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001246 0U, 0U,
1247 StubTest::GetEntrypoint(self, kQuickGetByteStatic),
1248 self,
1249 referrer);
1250 EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
1251 }
1252#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001253 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001254 LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
1255 // Force-print to std::cout so it's also outside the logcat.
1256 std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1257#endif
1258}
1259
1260
Mathieu Chartierc7853442015-03-27 14:35:38 -07001261static void GetSetBooleanInstance(Handle<mirror::Object>* obj, ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001262 ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001263 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001264#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1265 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001266 uint8_t values[] = { 0, true, 2, 128, 0xFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001267
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001268 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001269 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001270 reinterpret_cast<size_t>(obj->Get()),
1271 static_cast<size_t>(values[i]),
1272 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1273 self,
1274 referrer);
1275
Mathieu Chartierc7853442015-03-27 14:35:38 -07001276 uint8_t res = f->GetBoolean(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001277 EXPECT_EQ(values[i], res) << "Iteration " << i;
1278
Mathieu Chartierc7853442015-03-27 14:35:38 -07001279 f->SetBoolean<false>(obj->Get(), res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001280
Mathieu Chartierc7853442015-03-27 14:35:38 -07001281 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001282 reinterpret_cast<size_t>(obj->Get()),
1283 0U,
1284 StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
1285 self,
1286 referrer);
1287 EXPECT_EQ(res, static_cast<uint8_t>(res2));
1288 }
1289#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001290 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001291 LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
1292 // Force-print to std::cout so it's also outside the logcat.
1293 std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1294#endif
1295}
// Exercises the quick Set8Instance/GetByteInstance entrypoint stubs against an
// instance byte field: a stub write is verified via ArtField::GetByte, then a
// direct ArtField::SetByte is read back through the get stub.
static void GetSetByteInstance(Handle<mirror::Object>* obj, ArtField* f,
                               Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Covers both signed edges (-128, 127), zero, and mid-range values.
  int8_t values[] = { -128, -64, 0, 64, 127 };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write through the 8-bit instance set stub (args: field index, object, value).
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet8Instance),
                              self,
                              referrer);

    // Check the stub's write directly through the field.
    int8_t res = f->GetByte(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    // Now mutate the field directly and read it back through the get stub.
    f->SetByte<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetByteInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int8_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1330
// Exercises the quick Set16Static/GetCharStatic entrypoint stubs on a static
// char field: each sample value is written through the set stub and read back
// through the get stub.
static void GetSetCharStatic(ArtField* f, Thread* self, ArtMethod* referrer,
                             StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Covers zero, small values, and both edges of the unsigned 16-bit range.
  uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write through the 16-bit static set stub (args: field index, value).
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet16Static),
                              self,
                              referrer);

    // Read back through the char get stub; only the low 16 bits are compared.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetCharStatic),
                                           self,
                                           referrer);

    EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
// Exercises the quick Set16Static/GetShortStatic entrypoint stubs on a static
// short field. Shares the 16-bit set stub with the char test; only the typed
// get stub (and sign treatment of the result) differs.
static void GetSetShortStatic(ArtField* f, Thread* self,
                              ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Covers both signed 16-bit edges, zero, and near-edge values.
  int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write through the 16-bit static set stub (args: field index, value).
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet16Static),
                              self,
                              referrer);

    // Read back through the short get stub and compare as signed 16-bit.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetShortStatic),
                                           self,
                                           referrer);

    EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1391
// Exercises the quick Set16Instance/GetCharInstance entrypoint stubs against an
// instance char field: stub write checked via ArtField::GetChar, then a direct
// ArtField::SetChar is read back through the get stub.
static void GetSetCharInstance(Handle<mirror::Object>* obj, ArtField* f,
                               Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Covers zero, small values, and both edges of the unsigned 16-bit range.
  uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write through the 16-bit instance set stub (args: field index, object, value).
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    // Check the stub's write directly through the field.
    uint16_t res = f->GetChar(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    // Now mutate the field directly and read it back through the get stub.
    f->SetChar<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetCharInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint16_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
// Exercises the quick Set16Instance/GetShortInstance entrypoint stubs against
// an instance short field: stub write checked via ArtField::GetShort, then a
// direct ArtField::SetShort is read back through the get stub.
static void GetSetShortInstance(Handle<mirror::Object>* obj, ArtField* f,
                                Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Covers both signed 16-bit edges, zero, and near-edge values.
  int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write through the 16-bit instance set stub (args: field index, object, value).
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    // Check the stub's write directly through the field.
    int16_t res = f->GetShort(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    // Now mutate the field directly and read it back through the get stub.
    f->SetShort<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetShortInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int16_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1460
// Exercises the quick Set32Static/Get32Static entrypoint stubs on a static
// 32-bit field: each sample value is written through the set stub and read
// back through the get stub.
static void GetSet32Static(ArtField* f, Thread* self, ArtMethod* referrer,
                           StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Covers zero, small values, and the unsigned 32-bit maximum.
  uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write through the 32-bit static set stub (args: field index, value).
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet32Static),
                              self,
                              referrer);

    // Read back through the get stub.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet32Static),
                                           self,
                                           referrer);

#if defined(__mips__) && defined(__LP64__)
    // On MIPS64 the 32-bit result comes back widened in the 64-bit return
    // register (presumably sign-extended), so compare only the low 32 bits.
    EXPECT_EQ(static_cast<uint32_t>(res), values[i]) << "Iteration " << i;
#else
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
#endif
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1495
1496
// Exercises the quick Set32Instance/Get32Instance entrypoint stubs against an
// instance int field: stub write checked via ArtField::GetInt, then a direct
// ArtField::SetInt is read back through the get stub.
static void GetSet32Instance(Handle<mirror::Object>* obj, ArtField* f,
                             Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Covers zero, small values, and the unsigned 32-bit maximum.
  uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write through the 32-bit instance set stub (args: field index, object, value).
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet32Instance),
                              self,
                              referrer);

    // Check the stub's write directly through the field (as signed int).
    int32_t res = f->GetInt(obj->Get());
    EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;

    // Now mutate the field directly and read it back through the get stub.
    res++;
    f->SetInt<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGet32Instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int32_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1533
1534
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))

// Helper for GetSetObjStatic: stores a reference value into the static field
// identified by f_idx via the SetObjStatic stub, then reads it back via the
// GetObjStatic stub and checks that the same pointer comes out.
static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
                                 ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Write through the object static set stub (args: field index, value).
  test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                            reinterpret_cast<size_t>(val),
                            0U,
                            StubTest::GetEntrypoint(self, kQuickSetObjStatic),
                            self,
                            referrer);

  // Read back through the object static get stub.
  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                                         0U, 0U,
                                         StubTest::GetEntrypoint(self, kQuickGetObjStatic),
                                         self,
                                         referrer);

  // The pointer must round-trip unchanged.
  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
}
#endif
1557
// Exercises the quick SetObjStatic/GetObjStatic entrypoint stubs on a static
// reference field: round-trips null, then a freshly allocated string, then
// null again (so the field does not keep the string alive afterwards).
static void GetSetObjStatic(ArtField* f, Thread* self, ArtMethod* referrer,
                            StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_static(f->GetDexFieldIndex(), str, self, referrer, test);

  set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1577
1578
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))

// Helper for GetSetObjInstance: stores val into f of object trg via the
// SetObjInstance stub, reads it back via the GetObjInstance stub, and also
// cross-checks the stored reference directly through ArtField::GetObj.
static void set_and_check_instance(ArtField* f, mirror::Object* trg,
                                   mirror::Object* val, Thread* self, ArtMethod* referrer,
                                   StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Write through the object instance set stub (args: field index, object, value).
  test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                            reinterpret_cast<size_t>(trg),
                            reinterpret_cast<size_t>(val),
                            StubTest::GetEntrypoint(self, kQuickSetObjInstance),
                            self,
                            referrer);

  // Read back through the object instance get stub.
  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                         reinterpret_cast<size_t>(trg),
                                         0U,
                                         StubTest::GetEntrypoint(self, kQuickGetObjInstance),
                                         self,
                                         referrer);

  // The pointer must round-trip unchanged through the stubs...
  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;

  // ...and the field itself must hold the same reference.
  EXPECT_OBJ_PTR_EQ(val, f->GetObj(trg));
}
#endif
1604
// Exercises the quick SetObjInstance/GetObjInstance entrypoint stubs against an
// instance reference field: round-trips null, then a freshly allocated string,
// then null again (so the field does not keep the string alive afterwards).
static void GetSetObjInstance(Handle<mirror::Object>* obj, ArtField* f,
                              Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_instance(f, obj->Get(), str, self, referrer, test);

  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1624
1625
Calin Juravle872ab3f2015-10-02 07:27:51 +01001626// TODO: Complete these tests for 32b architectures
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001627
// Exercises the quick Set64Static/Get64Static entrypoint stubs on a static
// 64-bit field. Only built on 64-bit targets (x86-64 non-Apple, MIPS64,
// arm64) — see the TODO above about completing these for 32-bit.
static void GetSet64Static(ArtField* f, Thread* self, ArtMethod* referrer,
                           StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) \
    || defined(__aarch64__)
  // Covers zero, small values, and values wider than 32 bits.
  uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // 64 bit FieldSet stores the set value in the second register.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              0U,
                              values[i],
                              StubTest::GetEntrypoint(self, kQuickSet64Static),
                              self,
                              referrer);

    // Read back through the 64-bit get stub.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet64Static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1659
1660
// Exercises the quick Set64Instance/Get64Instance entrypoint stubs against an
// instance long field: stub write checked via ArtField::GetLong, then a direct
// ArtField::SetLong is read back through the get stub. 64-bit targets only.
static void GetSet64Instance(Handle<mirror::Object>* obj, ArtField* f,
                             Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) || \
    defined(__aarch64__)
  // Covers zero, small values, and values wider than 32 bits.
  uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write through the 64-bit instance set stub (args: field index, object, value).
    // size_t is 64-bit on all targets built here, so the cast is lossless.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet64Instance),
                              self,
                              referrer);

    // Check the stub's write directly through the field (as signed long).
    int64_t res = f->GetLong(obj->Get());
    EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;

    // Now mutate the field directly and read it back through the get stub.
    res++;
    f->SetLong<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGet64Instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int64_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1697
// Driver for the field-accessor stub tests: instantiates the AllFields test
// class, then for every static and instance field whose primitive type matches
// test_type dispatches to the matching GetSet* helper above.
static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
  // garbage is created during ClassLinker::Init

  // Create the test object via JNI before taking mutator-lock access below.
  JNIEnv* env = Thread::Current()->GetJniEnv();
  jclass jc = env->FindClass("AllFields");
  CHECK(jc != nullptr);
  jobject o = env->AllocObject(jc);
  CHECK(o != nullptr);

  ScopedObjectAccess soa(self);
  StackHandleScope<3> hs(self);  // NOTE(review): sized 3, but only two handles are created.
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object>(o)));
  Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
  // Need a method as a referrer
  ArtMethod* m = c->GetDirectMethod(0, kRuntimePointerSize);

  // Play with it...

  // Static fields.
  for (ArtField& f : c->GetSFields()) {
    Primitive::Type type = f.GetTypeAsPrimitiveType();
    if (test_type != type) {
      continue;
    }
    switch (type) {
      case Primitive::Type::kPrimBoolean:
        GetSetBooleanStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimByte:
        GetSetByteStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimChar:
        GetSetCharStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimShort:
        GetSetShortStatic(&f, self, m, test);
        break;
      case Primitive::Type::kPrimInt:
        GetSet32Static(&f, self, m, test);
        break;
      case Primitive::Type::kPrimLong:
        GetSet64Static(&f, self, m, test);
        break;
      case Primitive::Type::kPrimNot:
        // Don't try array.
        if (f.GetTypeDescriptor()[0] != '[') {
          GetSetObjStatic(&f, self, m, test);
        }
        break;
      default:
        break;  // Skip.
    }
  }

  // Instance fields.
  for (ArtField& f : c->GetIFields()) {
    Primitive::Type type = f.GetTypeAsPrimitiveType();
    if (test_type != type) {
      continue;
    }
    switch (type) {
      case Primitive::Type::kPrimBoolean:
        GetSetBooleanInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimByte:
        GetSetByteInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimChar:
        GetSetCharInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimShort:
        GetSetShortInstance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimInt:
        GetSet32Instance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimLong:
        GetSet64Instance(&obj, &f, self, m, test);
        break;
      case Primitive::Type::kPrimNot:
        // Don't try array.
        if (f.GetTypeDescriptor()[0] != '[') {
          GetSetObjInstance(&obj, &f, self, m, test);
        }
        break;
      default:
        break;  // Skip.
    }
  }

  // TODO: Deallocate things.
}
1790
Fred Shih37f05ef2014-07-16 18:38:08 -07001791TEST_F(StubTest, Fields8) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001792 Thread* self = Thread::Current();
1793
1794 self->TransitionFromSuspendedToRunnable();
1795 LoadDex("AllFields");
1796 bool started = runtime_->Start();
1797 CHECK(started);
1798
1799 TestFields(self, this, Primitive::Type::kPrimBoolean);
1800 TestFields(self, this, Primitive::Type::kPrimByte);
1801}
1802
1803TEST_F(StubTest, Fields16) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001804 Thread* self = Thread::Current();
1805
1806 self->TransitionFromSuspendedToRunnable();
1807 LoadDex("AllFields");
1808 bool started = runtime_->Start();
1809 CHECK(started);
1810
1811 TestFields(self, this, Primitive::Type::kPrimChar);
1812 TestFields(self, this, Primitive::Type::kPrimShort);
1813}
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001814
1815TEST_F(StubTest, Fields32) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001816 Thread* self = Thread::Current();
1817
1818 self->TransitionFromSuspendedToRunnable();
1819 LoadDex("AllFields");
1820 bool started = runtime_->Start();
1821 CHECK(started);
1822
1823 TestFields(self, this, Primitive::Type::kPrimInt);
1824}
1825
1826TEST_F(StubTest, FieldsObj) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001827 Thread* self = Thread::Current();
1828
1829 self->TransitionFromSuspendedToRunnable();
1830 LoadDex("AllFields");
1831 bool started = runtime_->Start();
1832 CHECK(started);
1833
1834 TestFields(self, this, Primitive::Type::kPrimNot);
1835}
1836
1837TEST_F(StubTest, Fields64) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001838 Thread* self = Thread::Current();
1839
1840 self->TransitionFromSuspendedToRunnable();
1841 LoadDex("AllFields");
1842 bool started = runtime_->Start();
1843 CHECK(started);
1844
1845 TestFields(self, this, Primitive::Type::kPrimLong);
1846}
1847
Vladimir Marko9d07e3d2016-03-31 12:02:28 +01001848// Disabled, b/27991555 .
1849// FIXME: Hacking the entry point to point to art_quick_to_interpreter_bridge is broken.
1850// The bridge calls through to GetCalleeSaveMethodCaller() which looks up the pre-header
1851// and gets a bogus OatQuickMethodHeader* pointing into our assembly code just before
1852// the bridge and uses that to check for inlined frames, crashing in the process.
1853TEST_F(StubTest, DISABLED_IMT) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001854#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1855 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe51f76352014-05-21 08:28:48 -07001856 Thread* self = Thread::Current();
1857
1858 ScopedObjectAccess soa(self);
1859 StackHandleScope<7> hs(self);
1860
1861 JNIEnv* env = Thread::Current()->GetJniEnv();
1862
1863 // ArrayList
1864
1865 // Load ArrayList and used methods (JNI).
1866 jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
1867 ASSERT_NE(nullptr, arraylist_jclass);
1868 jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
1869 ASSERT_NE(nullptr, arraylist_constructor);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001870 jmethodID contains_jmethod = env->GetMethodID(
1871 arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
Andreas Gampe51f76352014-05-21 08:28:48 -07001872 ASSERT_NE(nullptr, contains_jmethod);
1873 jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
1874 ASSERT_NE(nullptr, add_jmethod);
1875
Mathieu Chartiere401d142015-04-22 13:56:20 -07001876 // Get representation.
Andreas Gampe13b27842016-11-07 16:48:23 -08001877 ArtMethod* contains_amethod = jni::DecodeArtMethod(contains_jmethod);
Andreas Gampe51f76352014-05-21 08:28:48 -07001878
1879 // Patch up ArrayList.contains.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001880 if (contains_amethod->GetEntryPointFromQuickCompiledCode() == nullptr) {
1881 contains_amethod->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
Andreas Gampe29b38412014-08-13 00:15:43 -07001882 StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
Andreas Gampe51f76352014-05-21 08:28:48 -07001883 }
1884
1885 // List
1886
1887 // Load List and used methods (JNI).
1888 jclass list_jclass = env->FindClass("java/util/List");
1889 ASSERT_NE(nullptr, list_jclass);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001890 jmethodID inf_contains_jmethod = env->GetMethodID(
1891 list_jclass, "contains", "(Ljava/lang/Object;)Z");
Andreas Gampe51f76352014-05-21 08:28:48 -07001892 ASSERT_NE(nullptr, inf_contains_jmethod);
1893
1894 // Get mirror representation.
Andreas Gampe13b27842016-11-07 16:48:23 -08001895 ArtMethod* inf_contains = jni::DecodeArtMethod(inf_contains_jmethod);
Andreas Gampe51f76352014-05-21 08:28:48 -07001896
1897 // Object
1898
1899 jclass obj_jclass = env->FindClass("java/lang/Object");
1900 ASSERT_NE(nullptr, obj_jclass);
1901 jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
1902 ASSERT_NE(nullptr, obj_constructor);
1903
Andreas Gampe51f76352014-05-21 08:28:48 -07001904 // Create instances.
1905
1906 jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
1907 ASSERT_NE(nullptr, jarray_list);
Mathieu Chartier0795f232016-09-27 18:43:30 -07001908 Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object>(jarray_list)));
Andreas Gampe51f76352014-05-21 08:28:48 -07001909
1910 jobject jobj = env->NewObject(obj_jclass, obj_constructor);
1911 ASSERT_NE(nullptr, jobj);
Mathieu Chartier0795f232016-09-27 18:43:30 -07001912 Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object>(jobj)));
Andreas Gampe51f76352014-05-21 08:28:48 -07001913
Andreas Gampe1a7e2922014-05-21 15:37:53 -07001914 // Invocation tests.
1915
1916 // 1. imt_conflict
1917
1918 // Contains.
Nicolas Geoffray1004faa2016-03-23 14:28:30 +00001919
1920 // We construct the ImtConflictTable ourselves, as we cannot go into the runtime stub
1921 // that will create it: the runtime stub expects to be called by compiled code.
Nicolas Geoffray1004faa2016-03-23 14:28:30 +00001922 LinearAlloc* linear_alloc = Runtime::Current()->GetLinearAlloc();
1923 ArtMethod* conflict_method = Runtime::Current()->CreateImtConflictMethod(linear_alloc);
Mathieu Chartiere42888f2016-04-14 10:49:19 -07001924 ImtConflictTable* empty_conflict_table =
1925 Runtime::Current()->GetClassLinker()->CreateImtConflictTable(/*count*/0u, linear_alloc);
Nicolas Geoffray1004faa2016-03-23 14:28:30 +00001926 void* data = linear_alloc->Alloc(
1927 self,
Andreas Gampe542451c2016-07-26 09:02:02 -07001928 ImtConflictTable::ComputeSizeWithOneMoreEntry(empty_conflict_table, kRuntimePointerSize));
Nicolas Geoffray1004faa2016-03-23 14:28:30 +00001929 ImtConflictTable* new_table = new (data) ImtConflictTable(
Andreas Gampe542451c2016-07-26 09:02:02 -07001930 empty_conflict_table, inf_contains, contains_amethod, kRuntimePointerSize);
1931 conflict_method->SetImtConflictTable(new_table, kRuntimePointerSize);
Nicolas Geoffray1004faa2016-03-23 14:28:30 +00001932
Andreas Gampe51f76352014-05-21 08:28:48 -07001933 size_t result =
Nicolas Geoffray1004faa2016-03-23 14:28:30 +00001934 Invoke3WithReferrerAndHidden(reinterpret_cast<size_t>(conflict_method),
1935 reinterpret_cast<size_t>(array_list.Get()),
Andreas Gampe51f76352014-05-21 08:28:48 -07001936 reinterpret_cast<size_t>(obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -07001937 StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
Nicolas Geoffray1004faa2016-03-23 14:28:30 +00001938 self,
1939 contains_amethod,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001940 static_cast<size_t>(inf_contains->GetDexMethodIndex()));
Andreas Gampe51f76352014-05-21 08:28:48 -07001941
1942 ASSERT_FALSE(self->IsExceptionPending());
1943 EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
1944
1945 // Add object.
1946
1947 env->CallBooleanMethod(jarray_list, add_jmethod, jobj);
1948
David Sehr709b0702016-10-13 09:12:37 -07001949 ASSERT_FALSE(self->IsExceptionPending()) << mirror::Object::PrettyTypeOf(self->GetException());
Andreas Gampe51f76352014-05-21 08:28:48 -07001950
Andreas Gampe1a7e2922014-05-21 15:37:53 -07001951 // Contains.
Andreas Gampe51f76352014-05-21 08:28:48 -07001952
Nicolas Geoffray1004faa2016-03-23 14:28:30 +00001953 result =
1954 Invoke3WithReferrerAndHidden(reinterpret_cast<size_t>(conflict_method),
1955 reinterpret_cast<size_t>(array_list.Get()),
1956 reinterpret_cast<size_t>(obj.Get()),
1957 StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
1958 self,
1959 contains_amethod,
1960 static_cast<size_t>(inf_contains->GetDexMethodIndex()));
Andreas Gampe51f76352014-05-21 08:28:48 -07001961
1962 ASSERT_FALSE(self->IsExceptionPending());
1963 EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);
Nicolas Geoffray1004faa2016-03-23 14:28:30 +00001964
Andreas Gampe1a7e2922014-05-21 15:37:53 -07001965 // 2. regular interface trampoline
1966
Nicolas Geoffray1004faa2016-03-23 14:28:30 +00001967 result = Invoke3WithReferrer(static_cast<size_t>(inf_contains->GetDexMethodIndex()),
1968 reinterpret_cast<size_t>(array_list.Get()),
1969 reinterpret_cast<size_t>(obj.Get()),
1970 StubTest::GetEntrypoint(self,
1971 kQuickInvokeInterfaceTrampolineWithAccessCheck),
1972 self, contains_amethod);
Andreas Gampe1a7e2922014-05-21 15:37:53 -07001973
1974 ASSERT_FALSE(self->IsExceptionPending());
Nicolas Geoffray1004faa2016-03-23 14:28:30 +00001975 EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);
Andreas Gampe1a7e2922014-05-21 15:37:53 -07001976
Mathieu Chartiere401d142015-04-22 13:56:20 -07001977 result = Invoke3WithReferrer(
1978 static_cast<size_t>(inf_contains->GetDexMethodIndex()),
1979 reinterpret_cast<size_t>(array_list.Get()), reinterpret_cast<size_t>(array_list.Get()),
1980 StubTest::GetEntrypoint(self, kQuickInvokeInterfaceTrampolineWithAccessCheck), self,
1981 contains_amethod);
Andreas Gampe1a7e2922014-05-21 15:37:53 -07001982
1983 ASSERT_FALSE(self->IsExceptionPending());
1984 EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
Andreas Gampe51f76352014-05-21 08:28:48 -07001985#else
Andreas Gampe6aac3552014-06-09 14:55:53 -07001986 LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
Andreas Gampe51f76352014-05-21 08:28:48 -07001987 // Force-print to std::cout so it's also outside the logcat.
Andreas Gampe6aac3552014-06-09 14:55:53 -07001988 std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
1989#endif
1990}
1991
Andreas Gampe6aac3552014-06-09 14:55:53 -07001992TEST_F(StubTest, StringIndexOf) {
Chris Larsencf283da2016-01-19 16:45:35 -08001993#if defined(__arm__) || defined(__aarch64__) || defined(__mips__)
Andreas Gampe6aac3552014-06-09 14:55:53 -07001994 Thread* self = Thread::Current();
1995 ScopedObjectAccess soa(self);
1996 // garbage is created during ClassLinker::Init
1997
1998 // Create some strings
1999 // Use array so we can index into it and use a matrix for expected results
2000 // Setup: The first half is standard. The second half uses a non-zero offset.
2001 // TODO: Shared backing arrays.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002002 const char* c_str[] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
2003 static constexpr size_t kStringCount = arraysize(c_str);
2004 const char c_char[] = { 'a', 'b', 'c', 'd', 'e' };
2005 static constexpr size_t kCharCount = arraysize(c_char);
Andreas Gampe6aac3552014-06-09 14:55:53 -07002006
2007 StackHandleScope<kStringCount> hs(self);
2008 Handle<mirror::String> s[kStringCount];
2009
2010 for (size_t i = 0; i < kStringCount; ++i) {
2011 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
2012 }
2013
2014 // Matrix of expectations. First component is first parameter. Note we only check against the
2015 // sign, not the value. As we are testing random offsets, we need to compute this and need to
2016 // rely on String::CompareTo being correct.
2017 static constexpr size_t kMaxLen = 9;
2018 DCHECK_LE(strlen(c_str[kStringCount-1]), kMaxLen) << "Please fix the indexof test.";
2019
2020 // Last dimension: start, offset by 1.
2021 int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
2022 for (size_t x = 0; x < kStringCount; ++x) {
2023 for (size_t y = 0; y < kCharCount; ++y) {
2024 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2025 expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
2026 }
2027 }
2028 }
2029
2030 // Play with it...
2031
2032 for (size_t x = 0; x < kStringCount; ++x) {
2033 for (size_t y = 0; y < kCharCount; ++y) {
2034 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2035 int32_t start = static_cast<int32_t>(z) - 1;
2036
2037 // Test string_compareto x y
2038 size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
Andreas Gampe29b38412014-08-13 00:15:43 -07002039 StubTest::GetEntrypoint(self, kQuickIndexOf), self);
Andreas Gampe6aac3552014-06-09 14:55:53 -07002040
2041 EXPECT_FALSE(self->IsExceptionPending());
2042
2043 // The result is a 32b signed integer
2044 union {
2045 size_t r;
2046 int32_t i;
2047 } conv;
2048 conv.r = result;
2049
2050 EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
2051 c_char[y] << " @ " << start;
2052 }
2053 }
2054 }
2055
2056 // TODO: Deallocate things.
2057
2058 // Tests done.
2059#else
2060 LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
2061 // Force-print to std::cout so it's also outside the logcat.
2062 std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe51f76352014-05-21 08:28:48 -07002063#endif
2064}
2065
Roland Levillain02b75802016-07-13 11:54:35 +01002066// TODO: Exercise the ReadBarrierMarkRegX entry points.
2067
Man Cao1aee9002015-07-14 22:31:42 -07002068TEST_F(StubTest, ReadBarrier) {
2069#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
2070 defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
2071 Thread* self = Thread::Current();
2072
2073 const uintptr_t readBarrierSlow = StubTest::GetEntrypoint(self, kQuickReadBarrierSlow);
2074
2075 // Create an object
2076 ScopedObjectAccess soa(self);
2077 // garbage is created during ClassLinker::Init
2078
2079 StackHandleScope<2> hs(soa.Self());
2080 Handle<mirror::Class> c(
2081 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
2082
2083 // Build an object instance
2084 Handle<mirror::Object> obj(hs.NewHandle(c->AllocObject(soa.Self())));
2085
2086 EXPECT_FALSE(self->IsExceptionPending());
2087
2088 size_t result = Invoke3(0U, reinterpret_cast<size_t>(obj.Get()),
2089 mirror::Object::ClassOffset().SizeValue(), readBarrierSlow, self);
2090
2091 EXPECT_FALSE(self->IsExceptionPending());
2092 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
2093 mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
2094 EXPECT_EQ(klass, obj->GetClass());
2095
2096 // Tests done.
2097#else
2098 LOG(INFO) << "Skipping read_barrier_slow";
2099 // Force-print to std::cout so it's also outside the logcat.
2100 std::cout << "Skipping read_barrier_slow" << std::endl;
2101#endif
2102}
2103
Roland Levillain0d5a2812015-11-13 10:07:31 +00002104TEST_F(StubTest, ReadBarrierForRoot) {
2105#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
2106 defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
2107 Thread* self = Thread::Current();
2108
2109 const uintptr_t readBarrierForRootSlow =
2110 StubTest::GetEntrypoint(self, kQuickReadBarrierForRootSlow);
2111
2112 // Create an object
2113 ScopedObjectAccess soa(self);
2114 // garbage is created during ClassLinker::Init
2115
2116 StackHandleScope<1> hs(soa.Self());
2117
2118 Handle<mirror::String> obj(
2119 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
2120
2121 EXPECT_FALSE(self->IsExceptionPending());
2122
2123 GcRoot<mirror::Class>& root = mirror::String::java_lang_String_;
2124 size_t result = Invoke3(reinterpret_cast<size_t>(&root), 0U, 0U, readBarrierForRootSlow, self);
2125
2126 EXPECT_FALSE(self->IsExceptionPending());
2127 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
2128 mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
2129 EXPECT_EQ(klass, obj->GetClass());
2130
2131 // Tests done.
2132#else
2133 LOG(INFO) << "Skipping read_barrier_for_root_slow";
2134 // Force-print to std::cout so it's also outside the logcat.
2135 std::cout << "Skipping read_barrier_for_root_slow" << std::endl;
2136#endif
2137}
2138
Andreas Gampe525cde22014-04-22 15:44:50 -07002139} // namespace art