blob: ee65fa8ab028fedf658f7971a67225df9f853477 [file] [log] [blame]
Andreas Gampe525cde22014-04-22 15:44:50 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Ian Rogerse63db272014-07-15 15:36:11 -070017#include <cstdio>
18
Mathieu Chartierc7853442015-03-27 14:35:38 -070019#include "art_field-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070020#include "art_method-inl.h"
Andreas Gampe542451c2016-07-26 09:02:02 -070021#include "base/enums.h"
Vladimir Marko3481ba22015-04-13 12:22:36 +010022#include "class_linker-inl.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070023#include "common_runtime_test.h"
Andreas Gampe29b38412014-08-13 00:15:43 -070024#include "entrypoints/quick/quick_entrypoints_enum.h"
Andreas Gampe75a7db62016-09-26 12:04:26 -070025#include "imt_conflict_table.h"
Andreas Gampe13b27842016-11-07 16:48:23 -080026#include "jni_internal.h"
Nicolas Geoffray1004faa2016-03-23 14:28:30 +000027#include "linear_alloc.h"
Andreas Gampe51f76352014-05-21 08:28:48 -070028#include "mirror/class-inl.h"
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -070029#include "mirror/string-inl.h"
Mathieu Chartier0795f232016-09-27 18:43:30 -070030#include "scoped_thread_state_change-inl.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070031
32namespace art {
33
34
// Test fixture for invoking ART quick-code entrypoint stubs directly from C++.
// It sets up callee-save methods (needed so stubs can throw/transition into the
// runtime) and provides Invoke3* helpers that hand-roll, per architecture, the
// register/stack setup the quick ABI expects before branching to a stub.
class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      // Install a callee-save method for every CalleeSaveType that is missing.
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(), type);
        }
      }
    }
  }

  // Shrink the heap and force interpreter mode; keeps these stub tests cheap.
  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  // Exposes the thread's tlsPtr_ (StubTest is a friend-equivalent via CommonRuntimeTest).
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  // Invoke a stub with three arguments and no referrer / hidden argument.
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             ArtMethod* referrer) {
    return Invoke3WithReferrerAndHidden(arg0, arg1, arg2, code, self, referrer, 0);
  }

  // TODO: Set up a frame according to referrer's specs.
  // Core invoker: pushes a managed-stack fragment, then uses per-architecture
  // inline assembly to place arg0-arg2/self/hidden in the registers the quick
  // ABI expects, pushes the referrer as the "method" slot, and calls `code`.
  // `hidden` is an extra argument passed outside the normal arg registers
  // (xmm7 on x86, r12 on arm, x17 on arm64, $t7/$t0 on mips32/64, rax on
  // x86-64) — presumably for stubs such as the IMT conflict trampoline that
  // expect a hidden argument; confirm against the stub being tested.
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
#define PUSH(reg) "push " # reg "\n\t .cfi_adjust_cfa_offset 4\n\t"
#define POP(reg) "pop " # reg "\n\t .cfi_adjust_cfa_offset -4\n\t"
    __asm__ __volatile__(
        "movd %[hidden], %%xmm7\n\t"  // This is a memory op, so do this early. If it is off of
                                      // esp, then we won't be able to access it after spilling.

        // Spill 6 registers.
        PUSH(%%ebx)
        PUSH(%%ecx)
        PUSH(%%edx)
        PUSH(%%esi)
        PUSH(%%edi)
        PUSH(%%ebp)

        // Store the inputs to the stack, but keep the referrer up top, less work.
        PUSH(%[referrer])     // Align stack.
        PUSH(%[referrer])     // Store referrer

        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[code])
        // Now read them back into the required registers.
        POP(%%edi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%eax)
        // Call is prepared now.

        "call *%%edi\n\t"     // Call the stub
        "addl $8, %%esp\n\t"  // Pop referrer and padding.
        ".cfi_adjust_cfa_offset -8\n\t"

        // Restore 6 registers.
        POP(%%ebp)
        POP(%%edi)
        POP(%%esi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%ebx)

        : "=a" (result)
          // Use the result from eax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer]"r"(referrer), [hidden]"m"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory", "xmm7");  // clobber.
#undef PUSH
#undef POP
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop null and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "r0", "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #80\n\t"
        ".cfi_adjust_cfa_offset 80\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"
        // To be extra defensive, store x20. We do this because some of the stubs might make a
        // transition into the runtime via the blr instruction below and *not* save x20.
        "str x20, [sp, #64]\n\t"
        // 8 byte buffer

        "sub sp, sp, #16\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"    // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x19, x17, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"              // Call the stub
        "mov x8, x0\n\t"          // Store result
        "add sp, sp, #16\n\t"     // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"          // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"    // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "ldr x20, [sp, #64]\n\t"
        "add sp, sp, #80\n\t"     // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -80\n\t"

        "str x9, %[fpr_result]\n\t"  // Store the FPR comparison result
        "mov %[result], x8\n\t"      // Store the call result

        "b 3f\n\t"                // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
          // Leave one register unclobbered, which is needed for compiling with
          // -fstack-protector-strong. According to AAPCS64 registers x9-x15 are caller-saved,
          // which means we should unclobber one of the callee-saved registers that are unused.
          // Here we use x20.
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");
#elif defined(__mips__) && !defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a3 and t0-t7 which we say we don't clobber. May contain args.
        "addiu $sp, $sp, -64\n\t"
        "sw $a0, 0($sp)\n\t"
        "sw $a1, 4($sp)\n\t"
        "sw $a2, 8($sp)\n\t"
        "sw $a3, 12($sp)\n\t"
        "sw $t0, 16($sp)\n\t"
        "sw $t1, 20($sp)\n\t"
        "sw $t2, 24($sp)\n\t"
        "sw $t3, 28($sp)\n\t"
        "sw $t4, 32($sp)\n\t"
        "sw $t5, 36($sp)\n\t"
        "sw $t6, 40($sp)\n\t"
        "sw $t7, 44($sp)\n\t"
        // Spill gp register since it is caller save.
        "sw $gp, 52($sp)\n\t"

        "addiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sw %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "addiu $sp, $sp, -24\n\t"
        "sw %[arg0], 0($sp)\n\t"
        "sw %[arg1], 4($sp)\n\t"
        "sw %[arg2], 8($sp)\n\t"
        "sw %[code], 12($sp)\n\t"
        "sw %[self], 16($sp)\n\t"
        "sw %[hidden], 20($sp)\n\t"

        // Load call params into the right registers.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $t9, 12($sp)\n\t"
        "lw $s1, 16($sp)\n\t"
        "lw $t7, 20($sp)\n\t"
        "addiu $sp, $sp, 24\n\t"

        "jalr $t9\n\t"             // Call the stub.
        "nop\n\t"                  // Branch delay slot.
        "addiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $a3, 12($sp)\n\t"
        "lw $t0, 16($sp)\n\t"
        "lw $t1, 20($sp)\n\t"
        "lw $t2, 24($sp)\n\t"
        "lw $t3, 28($sp)\n\t"
        "lw $t4, 32($sp)\n\t"
        "lw $t5, 36($sp)\n\t"
        "lw $t6, 40($sp)\n\t"
        "lw $t7, 44($sp)\n\t"
        // Restore gp.
        "lw $gp, 52($sp)\n\t"
        "addiu $sp, $sp, 64\n\t"   // Free stack space, now sp as on entry.

        "move %[result], $v0\n\t"  // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "at", "v0", "v1", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "t8", "t9", "k0", "k1",
          "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__mips__) && defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a7 which we say we don't clobber. May contain args.
        "daddiu $sp, $sp, -64\n\t"
        "sd $a0, 0($sp)\n\t"
        "sd $a1, 8($sp)\n\t"
        "sd $a2, 16($sp)\n\t"
        "sd $a3, 24($sp)\n\t"
        "sd $a4, 32($sp)\n\t"
        "sd $a5, 40($sp)\n\t"
        "sd $a6, 48($sp)\n\t"
        "sd $a7, 56($sp)\n\t"

        "daddiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sd %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "daddiu $sp, $sp, -48\n\t"
        "sd %[arg0], 0($sp)\n\t"
        "sd %[arg1], 8($sp)\n\t"
        "sd %[arg2], 16($sp)\n\t"
        "sd %[code], 24($sp)\n\t"
        "sd %[self], 32($sp)\n\t"
        "sd %[hidden], 40($sp)\n\t"

        // Load call params into the right registers.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $t9, 24($sp)\n\t"
        "ld $s1, 32($sp)\n\t"
        "ld $t0, 40($sp)\n\t"
        "daddiu $sp, $sp, 48\n\t"

        "jalr $t9\n\t"              // Call the stub.
        "nop\n\t"                   // Branch delay slot.
        "daddiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $a3, 24($sp)\n\t"
        "ld $a4, 32($sp)\n\t"
        "ld $a5, 40($sp)\n\t"
        "ld $a6, 48($sp)\n\t"
        "ld $a7, 56($sp)\n\t"
        "daddiu $sp, $sp, 64\n\t"

        "move %[result], $v0\n\t"   // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        // Instead aliases t0-t3, register names $12-$15 has been used in the clobber list because
        // t0-t3 are ambiguous.
        : "at", "v0", "v1", "$12", "$13", "$14", "$15", "s0", "s1", "s2", "s3", "s4", "s5", "s6",
          "s7", "t8", "t9", "k0", "k1", "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__)
#define PUSH(reg) "pushq " # reg "\n\t .cfi_adjust_cfa_offset 8\n\t"
#define POP(reg) "popq " # reg "\n\t .cfi_adjust_cfa_offset -8\n\t"
    // Note: Uses the native convention. We do a callee-save regimen by manually spilling and
    // restoring almost all registers.
    // TODO: Set the thread?
    __asm__ __volatile__(
        // Spill almost everything (except rax, rsp). 14 registers.
        PUSH(%%rbx)
        PUSH(%%rcx)
        PUSH(%%rdx)
        PUSH(%%rsi)
        PUSH(%%rdi)
        PUSH(%%rbp)
        PUSH(%%r8)
        PUSH(%%r9)
        PUSH(%%r10)
        PUSH(%%r11)
        PUSH(%%r12)
        PUSH(%%r13)
        PUSH(%%r14)
        PUSH(%%r15)

        PUSH(%[referrer])              // Push referrer & 16B alignment padding
        PUSH(%[referrer])

        // Now juggle the input registers.
        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[hidden])
        PUSH(%[code])
        POP(%%r8)
        POP(%%rax)
        POP(%%rdx)
        POP(%%rsi)
        POP(%%rdi)

        "call *%%r8\n\t"      // Call the stub
        "addq $16, %%rsp\n\t" // Pop null and padding
        ".cfi_adjust_cfa_offset -16\n\t"

        POP(%%r15)
        POP(%%r14)
        POP(%%r13)
        POP(%%r12)
        POP(%%r11)
        POP(%%r10)
        POP(%%r9)
        POP(%%r8)
        POP(%%rbp)
        POP(%%rdi)
        POP(%%rsi)
        POP(%%rdx)
        POP(%%rcx)
        POP(%%rbx)

        : "=a" (result)
          // Use the result from rax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into some other
          // register. We can't use "b" (rbx), as ASAN uses this for the frame pointer.
        : "memory");  // We spill and restore (almost) all registers, so only mention memory here.
#undef PUSH
#undef POP
#else
    UNUSED(arg0, arg1, arg2, code, referrer, hidden);
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    // On arm64 the asm above verifies that callee-saved FP registers d8-d15
    // survived the call; a non-zero fpr_result flags a clobber. On other
    // architectures fpr_result stays 0, so this check is a no-op there.
    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Read the quick-entrypoint function pointer for `entrypoint` out of the
  // thread-local entrypoint table of `self`.
  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
    int32_t offset;
    offset = GetThreadOffset<kRuntimePointerSize>(entrypoint).Int32Value();
    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
  }

 protected:
  // Result of the FPR-preservation check performed by the arm64 invoke path.
  size_t fp_result;
};
542
543
Andreas Gampe525cde22014-04-22 15:44:50 -0700544TEST_F(StubTest, Memcpy) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200545#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__)) || defined(__mips__)
Andreas Gampe525cde22014-04-22 15:44:50 -0700546 Thread* self = Thread::Current();
547
548 uint32_t orig[20];
549 uint32_t trg[20];
550 for (size_t i = 0; i < 20; ++i) {
551 orig[i] = i;
552 trg[i] = 0;
553 }
554
555 Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
Andreas Gampe29b38412014-08-13 00:15:43 -0700556 10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700557
558 EXPECT_EQ(orig[0], trg[0]);
559
560 for (size_t i = 1; i < 4; ++i) {
561 EXPECT_NE(orig[i], trg[i]);
562 }
563
564 for (size_t i = 4; i < 14; ++i) {
565 EXPECT_EQ(orig[i], trg[i]);
566 }
567
568 for (size_t i = 14; i < 20; ++i) {
569 EXPECT_NE(orig[i], trg[i]);
570 }
571
572 // TODO: Test overlapping?
573
574#else
575 LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
576 // Force-print to std::cout so it's also outside the logcat.
577 std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
578#endif
579}
580
Andreas Gampe525cde22014-04-22 15:44:50 -0700581TEST_F(StubTest, LockObject) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200582#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
583 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700584 static constexpr size_t kThinLockLoops = 100;
585
Andreas Gampe525cde22014-04-22 15:44:50 -0700586 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700587
588 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
589
Andreas Gampe525cde22014-04-22 15:44:50 -0700590 // Create an object
591 ScopedObjectAccess soa(self);
592 // garbage is created during ClassLinker::Init
593
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700594 StackHandleScope<2> hs(soa.Self());
595 Handle<mirror::String> obj(
596 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700597 LockWord lock = obj->GetLockWord(false);
598 LockWord::LockState old_state = lock.GetState();
599 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
600
Andreas Gampe29b38412014-08-13 00:15:43 -0700601 Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700602
603 LockWord lock_after = obj->GetLockWord(false);
604 LockWord::LockState new_state = lock_after.GetState();
605 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700606 EXPECT_EQ(lock_after.ThinLockCount(), 0U); // Thin lock starts count at zero
607
608 for (size_t i = 1; i < kThinLockLoops; ++i) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700609 Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700610
611 // Check we're at lock count i
612
613 LockWord l_inc = obj->GetLockWord(false);
614 LockWord::LockState l_inc_state = l_inc.GetState();
615 EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
616 EXPECT_EQ(l_inc.ThinLockCount(), i);
617 }
618
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700619 // Force a fat lock by running identity hashcode to fill up lock word.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700620 Handle<mirror::String> obj2(hs.NewHandle(
621 mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700622
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700623 obj2->IdentityHashCode();
624
Andreas Gampe29b38412014-08-13 00:15:43 -0700625 Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700626
627 LockWord lock_after2 = obj2->GetLockWord(false);
628 LockWord::LockState new_state2 = lock_after2.GetState();
629 EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
630 EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));
631
632 // Test done.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700633#else
634 LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
635 // Force-print to std::cout so it's also outside the logcat.
636 std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
637#endif
638}
639
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700640
// Tiny deterministic pseudo-random generator used to drive the lock/unlock
// stress test. Not a statistically sound RNG; it only needs to be repeatable.
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  // Advance the state and return the new value. The multiply wraps modulo
  // 2^32 (unsigned arithmetic) before the reduction, exactly as the update
  // expression `val_ * 48271 % 2147483647 + 13` evaluates.
  uint32_t next() {
    const uint32_t product = val_ * 48271u;
    val_ = (product % 2147483647u) + 13u;
    return val_;
  }

  uint32_t val_;  // Current state; public so the sequence can be reseeded/inspected.
};
652
653
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700654// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
655static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200656#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
657 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700658 static constexpr size_t kThinLockLoops = 100;
659
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700660 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700661
662 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
663 const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700664 // Create an object
665 ScopedObjectAccess soa(self);
666 // garbage is created during ClassLinker::Init
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700667 static constexpr size_t kNumberOfLocks = 10; // Number of objects = lock
668 StackHandleScope<kNumberOfLocks + 1> hs(self);
669 Handle<mirror::String> obj(
670 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700671 LockWord lock = obj->GetLockWord(false);
672 LockWord::LockState old_state = lock.GetState();
673 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
674
Andreas Gampe29b38412014-08-13 00:15:43 -0700675 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700676 // This should be an illegal monitor state.
677 EXPECT_TRUE(self->IsExceptionPending());
678 self->ClearException();
679
680 LockWord lock_after = obj->GetLockWord(false);
681 LockWord::LockState new_state = lock_after.GetState();
682 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);
Andreas Gampe525cde22014-04-22 15:44:50 -0700683
Andreas Gampe29b38412014-08-13 00:15:43 -0700684 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700685
686 LockWord lock_after2 = obj->GetLockWord(false);
687 LockWord::LockState new_state2 = lock_after2.GetState();
688 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);
689
Andreas Gampe29b38412014-08-13 00:15:43 -0700690 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700691
692 LockWord lock_after3 = obj->GetLockWord(false);
693 LockWord::LockState new_state3 = lock_after3.GetState();
694 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);
695
696 // Stress test:
697 // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
698 // each step.
699
700 RandGen r(0x1234);
701
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700702 constexpr size_t kIterations = 10000; // Number of iterations
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700703 constexpr size_t kMoveToFat = 1000; // Chance of 1:kMoveFat to make a lock fat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700704
705 size_t counts[kNumberOfLocks];
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700706 bool fat[kNumberOfLocks]; // Whether a lock should be thin or fat.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700707 Handle<mirror::String> objects[kNumberOfLocks];
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700708
709 // Initialize = allocate.
710 for (size_t i = 0; i < kNumberOfLocks; ++i) {
711 counts[i] = 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700712 fat[i] = false;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700713 objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700714 }
715
716 for (size_t i = 0; i < kIterations; ++i) {
717 // Select which lock to update.
718 size_t index = r.next() % kNumberOfLocks;
719
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700720 // Make lock fat?
721 if (!fat[index] && (r.next() % kMoveToFat == 0)) {
722 fat[index] = true;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700723 objects[index]->IdentityHashCode();
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700724
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700725 LockWord lock_iter = objects[index]->GetLockWord(false);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700726 LockWord::LockState iter_state = lock_iter.GetState();
727 if (counts[index] == 0) {
728 EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
729 } else {
730 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
731 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700732 } else {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800733 bool take_lock; // Whether to lock or unlock in this step.
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700734 if (counts[index] == 0) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800735 take_lock = true;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700736 } else if (counts[index] == kThinLockLoops) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800737 take_lock = false;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700738 } else {
739 // Randomly.
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800740 take_lock = r.next() % 2 == 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700741 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700742
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800743 if (take_lock) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700744 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
745 self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700746 counts[index]++;
747 } else {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700748 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700749 art_quick_unlock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700750 counts[index]--;
751 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700752
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700753 EXPECT_FALSE(self->IsExceptionPending());
754
755 // Check the new state.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700756 LockWord lock_iter = objects[index]->GetLockWord(true);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700757 LockWord::LockState iter_state = lock_iter.GetState();
758 if (fat[index]) {
759 // Abuse MonitorInfo.
760 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700761 MonitorInfo info(objects[index].Get());
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700762 EXPECT_EQ(counts[index], info.entry_count_) << index;
763 } else {
764 if (counts[index] > 0) {
765 EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
766 EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
767 } else {
768 EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
769 }
770 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700771 }
772 }
773
774 // Unlock the remaining count times and then check it's unlocked. Then deallocate.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700775 // Go reverse order to correctly handle Handles.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700776 for (size_t i = 0; i < kNumberOfLocks; ++i) {
777 size_t index = kNumberOfLocks - 1 - i;
778 size_t count = counts[index];
779 while (count > 0) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700780 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
781 self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700782 count--;
783 }
784
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700785 LockWord lock_after4 = objects[index]->GetLockWord(false);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700786 LockWord::LockState new_state4 = lock_after4.GetState();
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700787 EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
788 || LockWord::LockState::kFatLocked == new_state4);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700789 }
790
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700791 // Test done.
Andreas Gampe525cde22014-04-22 15:44:50 -0700792#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -0800793 UNUSED(test);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700794 LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
Andreas Gampe525cde22014-04-22 15:44:50 -0700795 // Force-print to std::cout so it's also outside the logcat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700796 std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe525cde22014-04-22 15:44:50 -0700797#endif
798}
799
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700800TEST_F(StubTest, UnlockObject) {
Andreas Gampe369810a2015-01-14 19:53:31 -0800801 // This will lead to monitor error messages in the log.
802 ScopedLogSeverity sls(LogSeverity::FATAL);
803
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700804 TestUnlockObject(this);
805}
Andreas Gampe525cde22014-04-22 15:44:50 -0700806
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
// Forward declaration of the assembly stub. NOTE(review): the CheckCast test
// below shadows this name with a local uintptr_t fetched via GetEntrypoint(),
// so this declaration appears unused here — confirm before removing.
extern "C" void art_quick_check_instance_of(void);
#endif
811
// Exercises the check-instance-of stub: arg0 is the object, arg1 is the class
// to test against. On success the stub returns with no exception pending on
// the calling thread; on failure it leaves one pending.
TEST_F(StubTest, CheckCast) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  // Note: this local shadows the file-scope extern "C" declaration of the same
  // name above; the entrypoint address is fetched from the thread instead.
  const uintptr_t art_quick_check_instance_of =
      StubTest::GetEntrypoint(self, kQuickCheckInstanceOf);

  // Find some classes.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  VariableSizedHandleScope hs(soa.Self());
  Handle<mirror::Class> klass_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
  Handle<mirror::Class> klass_str(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/String;")));
  Handle<mirror::Class> klass_list(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/util/List;")));
  Handle<mirror::Class> klass_cloneable(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Cloneable;")));
  Handle<mirror::Class> klass_array_list(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/util/ArrayList;")));
  Handle<mirror::Object> obj(hs.NewHandle(klass_obj->AllocObject(soa.Self())));
  Handle<mirror::String> string(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "ABCD")));
  Handle<mirror::Object> array_list(hs.NewHandle(klass_array_list->AllocObject(soa.Self())));

  EXPECT_FALSE(self->IsExceptionPending());

  // Expected true: Test object instance of java.lang.Object.
  Invoke3(reinterpret_cast<size_t>(obj.Get()),
          reinterpret_cast<size_t>(klass_obj.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  // Expected true: Test string instance of java.lang.String.
  Invoke3(reinterpret_cast<size_t>(string.Get()),
          reinterpret_cast<size_t>(klass_str.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  // Expected true: Test string instance of java.lang.Object.
  Invoke3(reinterpret_cast<size_t>(string.Get()),
          reinterpret_cast<size_t>(klass_obj.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  // Expected false: Test object instance of java.lang.String.
  Invoke3(reinterpret_cast<size_t>(obj.Get()),
          reinterpret_cast<size_t>(klass_str.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // Expected true: Test ArrayList instance of java.util.List.
  Invoke3(reinterpret_cast<size_t>(array_list.Get()),
          reinterpret_cast<size_t>(klass_list.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  // Expected true: Test ArrayList instance of java.lang.Cloneable.
  Invoke3(reinterpret_cast<size_t>(array_list.Get()),
          reinterpret_cast<size_t>(klass_cloneable.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  // Expected false: Test String instance of java.util.ArrayList.
  Invoke3(reinterpret_cast<size_t>(string.Get()),
          reinterpret_cast<size_t>(klass_array_list.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // Expected false: Test String instance of java.lang.Cloneable.
  Invoke3(reinterpret_cast<size_t>(string.Get()),
          reinterpret_cast<size_t>(klass_cloneable.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

#else
  LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
910
911
Andreas Gampe525cde22014-04-22 15:44:50 -0700912TEST_F(StubTest, APutObj) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200913#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
914 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700915 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700916
917 // Do not check non-checked ones, we'd need handlers and stuff...
918 const uintptr_t art_quick_aput_obj_with_null_and_bound_check =
919 StubTest::GetEntrypoint(self, kQuickAputObjectWithNullAndBoundCheck);
920
Andreas Gampe525cde22014-04-22 15:44:50 -0700921 // Create an object
922 ScopedObjectAccess soa(self);
923 // garbage is created during ClassLinker::Init
924
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700925 StackHandleScope<5> hs(soa.Self());
926 Handle<mirror::Class> c(
927 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
928 Handle<mirror::Class> ca(
929 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700930
931 // Build a string array of size 1
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700932 Handle<mirror::ObjectArray<mirror::Object>> array(
933 hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));
Andreas Gampe525cde22014-04-22 15:44:50 -0700934
935 // Build a string -> should be assignable
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700936 Handle<mirror::String> str_obj(
937 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700938
939 // Build a generic object -> should fail assigning
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700940 Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));
Andreas Gampe525cde22014-04-22 15:44:50 -0700941
942 // Play with it...
943
944 // 1) Success cases
Andreas Gampef4e910b2014-04-29 16:55:52 -0700945 // 1.1) Assign str_obj to array[0..3]
Andreas Gampe525cde22014-04-22 15:44:50 -0700946
947 EXPECT_FALSE(self->IsExceptionPending());
948
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700949 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700950 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700951
952 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700953 EXPECT_EQ(str_obj.Get(), array->Get(0));
Andreas Gampe525cde22014-04-22 15:44:50 -0700954
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700955 Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700956 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700957
958 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700959 EXPECT_EQ(str_obj.Get(), array->Get(1));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700960
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700961 Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700962 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700963
964 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700965 EXPECT_EQ(str_obj.Get(), array->Get(2));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700966
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700967 Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700968 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700969
970 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700971 EXPECT_EQ(str_obj.Get(), array->Get(3));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700972
973 // 1.2) Assign null to array[0..3]
Andreas Gampe525cde22014-04-22 15:44:50 -0700974
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700975 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700976 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700977
978 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampef4e910b2014-04-29 16:55:52 -0700979 EXPECT_EQ(nullptr, array->Get(0));
980
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700981 Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700982 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700983
984 EXPECT_FALSE(self->IsExceptionPending());
985 EXPECT_EQ(nullptr, array->Get(1));
986
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700987 Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700988 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700989
990 EXPECT_FALSE(self->IsExceptionPending());
991 EXPECT_EQ(nullptr, array->Get(2));
992
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700993 Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700994 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700995
996 EXPECT_FALSE(self->IsExceptionPending());
997 EXPECT_EQ(nullptr, array->Get(3));
Andreas Gampe525cde22014-04-22 15:44:50 -0700998
999 // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.
1000
1001 // 2) Failure cases (str into str[])
1002 // 2.1) Array = null
1003 // TODO: Throwing NPE needs actual DEX code
1004
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001005// Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe525cde22014-04-22 15:44:50 -07001006// reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
1007//
1008// EXPECT_TRUE(self->IsExceptionPending());
1009// self->ClearException();
1010
1011 // 2.2) Index < 0
1012
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001013 Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
1014 reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -07001015 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -07001016
1017 EXPECT_TRUE(self->IsExceptionPending());
1018 self->ClearException();
1019
1020 // 2.3) Index > 0
1021
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001022 Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -07001023 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -07001024
1025 EXPECT_TRUE(self->IsExceptionPending());
1026 self->ClearException();
1027
1028 // 3) Failure cases (obj into str[])
1029
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001030 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -07001031 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -07001032
1033 EXPECT_TRUE(self->IsExceptionPending());
1034 self->ClearException();
1035
1036 // Tests done.
1037#else
1038 LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
1039 // Force-print to std::cout so it's also outside the logcat.
1040 std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
1041#endif
1042}
1043
// Exercises the object-allocation entrypoints (with-checks, resolved,
// initialized) on a resolved class, then drives the heap to exhaustion and
// checks the initialized-alloc stub's out-of-memory failure path.
TEST_F(StubTest, AllocObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // Allocate via the with-checks stub: expect a non-null, valid object of class c.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectWithChecks),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // Same expectations via the resolved-class stub.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // Same expectations via the initialized-class stub.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
    // Fix the heap's ideal footprint at 1 GB for this test, then fill it below.
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K
    size_t length = 128 * KB / 4;
    // Keep allocating object arrays, shrinking the requested length whenever an
    // allocation fails, until even small arrays (length <= 10) no longer fit.
    while (length > 10) {
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h.Get() == nullptr) {
        self->ClearException();

        // Try a smaller length
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h.Get() != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    // With the heap exhausted, the stub must fail: pending exception and a null result.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1160
// Exercises the array-allocation entrypoint kQuickAllocArrayResolved (success
// with length 10, failure with a GB-sized length). The generic kQuickAllocArray
// path is disabled below because its type_idx would be artificial.
TEST_F(StubTest, AllocObjectArray) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(self);
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

  // Needed to have a linked method.
  Handle<mirror::Class> c_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());

  // For some reason this does not work, as the type_idx is artificial and outside what the
  // resolved types of c_obj allow...

  // NOTE(review): intentionally disabled dead code kept for reference; the
  // double parentheses presumably silence a constant-condition warning —
  // confirm.
  if ((false)) {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(
        static_cast<size_t>(c->GetDexTypeIndex().index_),  // type_idx
        10U,
        // arbitrary
        reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0, kRuntimePointerSize)),
        StubTest::GetEntrypoint(self, kQuickAllocArray),
        self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    EXPECT_EQ(obj->GetLength(), 10);
  }

  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 10U,
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << mirror::Object::PrettyTypeOf(self->GetException());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory.
  {
    // A GB-sized element count is expected to fail the allocation.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()),
                            GB,  // that should fail...
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1247
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001248
// Exercises the art_quick_string_compareto entrypoint against
// String::CompareTo; only the sign of the result is checked, not its value.
TEST_F(StubTest, StringCompareTo) {
  TEST_DISABLED_FOR_STRING_COMPRESSION();
  // There is no StringCompareTo runtime entrypoint for __arm__ or __aarch64__.
#if defined(__i386__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs
  // NOTE(review): the TODO above looks like a copy-paste leftover from the
  // allocation tests — confirm and remove.

  Thread* self = Thread::Current();

  const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);

  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings.
  // Use an array so we can index into it and use a matrix for expected results.
  // NOTE(review): an older setup also covered strings at a non-zero offset into
  // a shared backing array; string offsets no longer exist, so all inputs here
  // are plain strings.
  // TODO: Shared backing arrays.
  const char* c[] = { "", "", "a", "aa", "ab",
      "aacaacaacaacaacaac",  // This one's under the default limit to go to __memcmp16.
      "aacaacaacaacaacaacaacaacaacaacaacaac",  // This one's over.
      "aacaacaacaacaacaacaacaacaacaacaacaaca" };  // As is this one. We need a separate one to
                                                  // defeat object-equal optimizations.
  static constexpr size_t kStringCount = arraysize(c);

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
  }

  // TODO: wide characters

  // Matrix of expectations. First component is the first parameter. Note we only check against
  // the sign, not the value, so the expected results are computed here via String::CompareTo,
  // which this test relies on being correct.
  int32_t expected[kStringCount][kStringCount];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      expected[x][y] = s[x]->CompareTo(s[y].Get());
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      // Test string_compareto x y
      size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
                              reinterpret_cast<size_t>(s[y].Get()), 0U,
                              art_quick_string_compareto, self);

      EXPECT_FALSE(self->IsExceptionPending());

      // The result is a 32b signed integer, recovered from the register-sized
      // return value.
      // NOTE(review): type-punning through a union is technically UB in ISO
      // C++ (supported as an extension by GCC/Clang); memcpy would be the
      // portable spelling.
      union {
        size_t r;
        int32_t i;
      } conv;
      conv.r = result;
      int32_t e = expected[x][y];
      EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
      std::endl;
#endif
}
1330
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001331
// Round-trips boolean values through the quick static-field stubs:
// writes each test value with the kQuickSet8Static entrypoint, reads it
// back with kQuickGetBooleanStatic, and expects the low byte to match.
// |f| is the static field under test, |referrer| is the method used as
// the field-access referrer, |test| provides the stub-invocation helper.
static void GetSetBooleanStatic(ArtField* f, Thread* self,
                                ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Deliberately includes values other than 0/1; see the comment on the EXPECT below.
  constexpr size_t num_values = 5;
  uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };

  for (size_t i = 0; i < num_values; ++i) {
    // Store values[i] into the field through the quick set stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet8Static),
                              self,
                              referrer);

    // Read the field back through the quick get stub.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
                                           self,
                                           referrer);
    // Boolean currently stores bools as uint8_t, be more zealous about asserting correct writes/gets.
    EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
// Round-trips signed byte values through the quick static-field stubs:
// stores with kQuickSet8Static, loads with kQuickGetByteStatic, and
// expects the sign-extended low byte to match the stored value.
static void GetSetByteStatic(ArtField* f, Thread* self, ArtMethod* referrer,
                             StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Boundary and mid-range values of int8_t.
  int8_t values[] = { -128, -64, 0, 64, 127 };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the quick set stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet8Static),
                              self,
                              referrer);

    // Load back through the quick get stub.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetByteStatic),
                                           self,
                                           referrer);
    EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1392
1393
// Round-trips boolean values through the quick instance-field stubs on
// the object held by |obj|: stores with kQuickSet8Instance, cross-checks
// the write through ArtField::GetBoolean, writes the value back with
// ArtField::SetBoolean, and finally loads with kQuickGetBooleanInstance.
static void GetSetBooleanInstance(Handle<mirror::Object>* obj, ArtField* f, Thread* self,
                                  ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Deliberately includes values other than 0/1 to check the raw byte storage.
  uint8_t values[] = { 0, true, 2, 128, 0xFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the quick set stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet8Instance),
                              self,
                              referrer);

    // The regular field accessor must see the stub's write.
    uint8_t res = f->GetBoolean(obj->Get());
    EXPECT_EQ(values[i], res) << "Iteration " << i;

    // Write the same value back through the regular accessor...
    f->SetBoolean<false>(obj->Get(), res);

    // ...and check that the quick get stub observes it.
    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint8_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
// Round-trips signed byte values through the quick instance-field stubs:
// stores with kQuickSet8Instance, cross-checks via ArtField::GetByte,
// then stores an incremented value via ArtField::SetByte so the
// kQuickGetByteInstance stub must observe the regular-accessor write.
static void GetSetByteInstance(Handle<mirror::Object>* obj, ArtField* f,
                               Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Boundary and mid-range values of int8_t.
  int8_t values[] = { -128, -64, 0, 64, 127 };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the quick set stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet8Instance),
                              self,
                              referrer);

    // The regular accessor must see the stub's write.
    int8_t res = f->GetByte(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    // Bump the value through the regular accessor; the get stub must see it.
    f->SetByte<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetByteInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int8_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1463
// Round-trips 16-bit unsigned (char) values through the quick
// static-field stubs: stores with kQuickSet16Static, loads with
// kQuickGetCharStatic, and expects the zero-extended low 16 bits to match.
static void GetSetCharStatic(ArtField* f, Thread* self, ArtMethod* referrer,
                             StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Boundary and mid-range values of uint16_t.
  uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the quick set stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet16Static),
                              self,
                              referrer);

    // Load back through the quick get stub.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetCharStatic),
                                           self,
                                           referrer);

    EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
// Round-trips 16-bit signed (short) values through the quick
// static-field stubs: stores with kQuickSet16Static, loads with
// kQuickGetShortStatic, and expects the sign-extended low 16 bits to match.
static void GetSetShortStatic(ArtField* f, Thread* self,
                              ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Boundary and mid-range values of int16_t.
  int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the quick set stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet16Static),
                              self,
                              referrer);

    // Load back through the quick get stub.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetShortStatic),
                                           self,
                                           referrer);

    EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1524
// Round-trips 16-bit unsigned (char) values through the quick
// instance-field stubs: stores with kQuickSet16Instance, cross-checks via
// ArtField::GetChar, stores an incremented value via ArtField::SetChar,
// and verifies kQuickGetCharInstance observes that write.
static void GetSetCharInstance(Handle<mirror::Object>* obj, ArtField* f,
                               Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Boundary and mid-range values of uint16_t.
  uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the quick set stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    // The regular accessor must see the stub's write.
    uint16_t res = f->GetChar(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    // Bump the value through the regular accessor; the get stub must see it.
    f->SetChar<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetCharInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint16_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
// Round-trips 16-bit signed (short) values through the quick
// instance-field stubs: stores with kQuickSet16Instance, cross-checks via
// ArtField::GetShort, stores an incremented value via ArtField::SetShort,
// and verifies kQuickGetShortInstance observes that write.
static void GetSetShortInstance(Handle<mirror::Object>* obj, ArtField* f,
                                Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // Boundary and mid-range values of int16_t.
  int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the quick set stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    // The regular accessor must see the stub's write.
    int16_t res = f->GetShort(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    // Bump the value through the regular accessor; the get stub must see it.
    f->SetShort<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetShortInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int16_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1593
Mathieu Chartiere401d142015-04-22 13:56:20 -07001594static void GetSet32Static(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001595 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001596 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001597#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1598 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001599 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001600
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001601 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001602 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001603 static_cast<size_t>(values[i]),
1604 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001605 StubTest::GetEntrypoint(self, kQuickSet32Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001606 self,
1607 referrer);
1608
Mathieu Chartierc7853442015-03-27 14:35:38 -07001609 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001610 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001611 StubTest::GetEntrypoint(self, kQuickGet32Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001612 self,
1613 referrer);
1614
Goran Jakovljevic04568812015-04-23 15:27:23 +02001615#if defined(__mips__) && defined(__LP64__)
1616 EXPECT_EQ(static_cast<uint32_t>(res), values[i]) << "Iteration " << i;
1617#else
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001618 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Goran Jakovljevic04568812015-04-23 15:27:23 +02001619#endif
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001620 }
1621#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001622 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001623 LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
1624 // Force-print to std::cout so it's also outside the logcat.
1625 std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
1626#endif
1627}
1628
1629
Mathieu Chartierc7853442015-03-27 14:35:38 -07001630static void GetSet32Instance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001631 Thread* self, ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001632 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001633#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1634 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001635 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001636
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001637 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001638 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001639 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001640 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001641 StubTest::GetEntrypoint(self, kQuickSet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001642 self,
1643 referrer);
1644
Mathieu Chartierc7853442015-03-27 14:35:38 -07001645 int32_t res = f->GetInt(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001646 EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;
1647
1648 res++;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001649 f->SetInt<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001650
Mathieu Chartierc7853442015-03-27 14:35:38 -07001651 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001652 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001653 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001654 StubTest::GetEntrypoint(self, kQuickGet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001655 self,
1656 referrer);
1657 EXPECT_EQ(res, static_cast<int32_t>(res2));
1658 }
1659#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001660 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001661 LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
1662 // Force-print to std::cout so it's also outside the logcat.
1663 std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1664#endif
1665}
1666
1667
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001668#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1669 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001670
1671static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001672 ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001673 REQUIRES_SHARED(Locks::mutator_lock_) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001674 test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1675 reinterpret_cast<size_t>(val),
1676 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001677 StubTest::GetEntrypoint(self, kQuickSetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001678 self,
1679 referrer);
1680
1681 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1682 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001683 StubTest::GetEntrypoint(self, kQuickGetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001684 self,
1685 referrer);
1686
1687 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1688}
1689#endif
1690
Mathieu Chartiere401d142015-04-22 13:56:20 -07001691static void GetSetObjStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001692 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001693 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001694#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1695 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierc7853442015-03-27 14:35:38 -07001696 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001697
1698 // Allocate a string object for simplicity.
1699 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartierc7853442015-03-27 14:35:38 -07001700 set_and_check_static(f->GetDexFieldIndex(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001701
Mathieu Chartierc7853442015-03-27 14:35:38 -07001702 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001703#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001704 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001705 LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
1706 // Force-print to std::cout so it's also outside the logcat.
1707 std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
1708#endif
1709}
1710
1711
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001712#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1713 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierc7853442015-03-27 14:35:38 -07001714static void set_and_check_instance(ArtField* f, mirror::Object* trg,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001715 mirror::Object* val, Thread* self, ArtMethod* referrer,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001716 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001717 REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001718 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001719 reinterpret_cast<size_t>(trg),
1720 reinterpret_cast<size_t>(val),
Andreas Gampe29b38412014-08-13 00:15:43 -07001721 StubTest::GetEntrypoint(self, kQuickSetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001722 self,
1723 referrer);
1724
Mathieu Chartierc7853442015-03-27 14:35:38 -07001725 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001726 reinterpret_cast<size_t>(trg),
1727 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001728 StubTest::GetEntrypoint(self, kQuickGetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001729 self,
1730 referrer);
1731
1732 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1733
Mathieu Chartier3398c782016-09-30 10:27:43 -07001734 EXPECT_OBJ_PTR_EQ(val, f->GetObj(trg));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001735}
1736#endif
1737
Mathieu Chartierc7853442015-03-27 14:35:38 -07001738static void GetSetObjInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001739 Thread* self, ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001740 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001741#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1742 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001743 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001744
1745 // Allocate a string object for simplicity.
1746 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001747 set_and_check_instance(f, obj->Get(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001748
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001749 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001750#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001751 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001752 LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
1753 // Force-print to std::cout so it's also outside the logcat.
1754 std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
1755#endif
1756}
1757
1758
Calin Juravle872ab3f2015-10-02 07:27:51 +01001759// TODO: Complete these tests for 32b architectures
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001760
// Round-trips 64-bit values through the quick static-field stubs
// (kQuickSet64Static / kQuickGet64Static). Only enabled on 64-bit
// targets, where size_t can carry the full value.
static void GetSet64Static(ArtField* f, Thread* self, ArtMethod* referrer,
                           StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) \
    || defined(__aarch64__)
  uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // 64 bit FieldSet stores the set value in the second register.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              0U,
                              values[i],
                              StubTest::GetEntrypoint(self, kQuickSet64Static),
                              self,
                              referrer);

    // Load back through the quick get stub.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet64Static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1792
1793
// Round-trips 64-bit values through the quick instance-field stubs:
// stores with kQuickSet64Instance, cross-checks via ArtField::GetLong,
// stores an incremented value via ArtField::SetLong, and verifies
// kQuickGet64Instance observes that write. 64-bit targets only, so the
// static_cast<size_t> below does not truncate the value.
static void GetSet64Instance(Handle<mirror::Object>* obj, ArtField* f,
                             Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) || \
    defined(__aarch64__)
  uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the quick set stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet64Instance),
                              self,
                              referrer);

    // The regular accessor must see the stub's write.
    int64_t res = f->GetLong(obj->Get());
    EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;

    // Bump the value through the regular accessor...
    res++;
    f->SetLong<false>(obj->Get(), res);

    // ...and the quick get stub must observe the bumped value.
    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGet64Instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int64_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1830
1831static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
1832 // garbage is created during ClassLinker::Init
1833
1834 JNIEnv* env = Thread::Current()->GetJniEnv();
1835 jclass jc = env->FindClass("AllFields");
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001836 CHECK(jc != nullptr);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001837 jobject o = env->AllocObject(jc);
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001838 CHECK(o != nullptr);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001839
1840 ScopedObjectAccess soa(self);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001841 StackHandleScope<3> hs(self);
Mathieu Chartier0795f232016-09-27 18:43:30 -07001842 Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object>(o)));
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001843 Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001844 // Need a method as a referrer
Andreas Gampe542451c2016-07-26 09:02:02 -07001845 ArtMethod* m = c->GetDirectMethod(0, kRuntimePointerSize);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001846
1847 // Play with it...
1848
1849 // Static fields.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001850 for (ArtField& f : c->GetSFields()) {
1851 Primitive::Type type = f.GetTypeAsPrimitiveType();
Mathieu Chartierc7853442015-03-27 14:35:38 -07001852 if (test_type != type) {
1853 continue;
1854 }
1855 switch (type) {
1856 case Primitive::Type::kPrimBoolean:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001857 GetSetBooleanStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001858 break;
1859 case Primitive::Type::kPrimByte:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001860 GetSetByteStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001861 break;
1862 case Primitive::Type::kPrimChar:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001863 GetSetCharStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001864 break;
1865 case Primitive::Type::kPrimShort:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001866 GetSetShortStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001867 break;
1868 case Primitive::Type::kPrimInt:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001869 GetSet32Static(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001870 break;
1871 case Primitive::Type::kPrimLong:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001872 GetSet64Static(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001873 break;
1874 case Primitive::Type::kPrimNot:
1875 // Don't try array.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001876 if (f.GetTypeDescriptor()[0] != '[') {
1877 GetSetObjStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001878 }
1879 break;
1880 default:
1881 break; // Skip.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001882 }
1883 }
1884
1885 // Instance fields.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001886 for (ArtField& f : c->GetIFields()) {
1887 Primitive::Type type = f.GetTypeAsPrimitiveType();
Mathieu Chartierc7853442015-03-27 14:35:38 -07001888 if (test_type != type) {
1889 continue;
1890 }
1891 switch (type) {
1892 case Primitive::Type::kPrimBoolean:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001893 GetSetBooleanInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001894 break;
1895 case Primitive::Type::kPrimByte:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001896 GetSetByteInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001897 break;
1898 case Primitive::Type::kPrimChar:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001899 GetSetCharInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001900 break;
1901 case Primitive::Type::kPrimShort:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001902 GetSetShortInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001903 break;
1904 case Primitive::Type::kPrimInt:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001905 GetSet32Instance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001906 break;
1907 case Primitive::Type::kPrimLong:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001908 GetSet64Instance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001909 break;
1910 case Primitive::Type::kPrimNot:
1911 // Don't try array.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001912 if (f.GetTypeDescriptor()[0] != '[') {
1913 GetSetObjInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001914 }
1915 break;
1916 default:
1917 break; // Skip.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001918 }
1919 }
1920
1921 // TODO: Deallocate things.
1922}
1923
Fred Shih37f05ef2014-07-16 18:38:08 -07001924TEST_F(StubTest, Fields8) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001925 Thread* self = Thread::Current();
1926
1927 self->TransitionFromSuspendedToRunnable();
1928 LoadDex("AllFields");
1929 bool started = runtime_->Start();
1930 CHECK(started);
1931
1932 TestFields(self, this, Primitive::Type::kPrimBoolean);
1933 TestFields(self, this, Primitive::Type::kPrimByte);
1934}
1935
1936TEST_F(StubTest, Fields16) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001937 Thread* self = Thread::Current();
1938
1939 self->TransitionFromSuspendedToRunnable();
1940 LoadDex("AllFields");
1941 bool started = runtime_->Start();
1942 CHECK(started);
1943
1944 TestFields(self, this, Primitive::Type::kPrimChar);
1945 TestFields(self, this, Primitive::Type::kPrimShort);
1946}
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001947
1948TEST_F(StubTest, Fields32) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001949 Thread* self = Thread::Current();
1950
1951 self->TransitionFromSuspendedToRunnable();
1952 LoadDex("AllFields");
1953 bool started = runtime_->Start();
1954 CHECK(started);
1955
1956 TestFields(self, this, Primitive::Type::kPrimInt);
1957}
1958
1959TEST_F(StubTest, FieldsObj) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001960 Thread* self = Thread::Current();
1961
1962 self->TransitionFromSuspendedToRunnable();
1963 LoadDex("AllFields");
1964 bool started = runtime_->Start();
1965 CHECK(started);
1966
1967 TestFields(self, this, Primitive::Type::kPrimNot);
1968}
1969
1970TEST_F(StubTest, Fields64) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001971 Thread* self = Thread::Current();
1972
1973 self->TransitionFromSuspendedToRunnable();
1974 LoadDex("AllFields");
1975 bool started = runtime_->Start();
1976 CHECK(started);
1977
1978 TestFields(self, this, Primitive::Type::kPrimLong);
1979}
1980
// Disabled, b/27991555 .
// FIXME: Hacking the entry point to point to art_quick_to_interpreter_bridge is broken.
// The bridge calls through to GetCalleeSaveMethodCaller() which looks up the pre-header
// and gets a bogus OatQuickMethodHeader* pointing into our assembly code just before
// the bridge and uses that to check for inlined frames, crashing in the process.
//
// Tests interface-method dispatch through the stubs: (1) the IMT conflict
// trampoline with a hand-built ImtConflictTable mapping List.contains ->
// ArrayList.contains, and (2) the invoke-interface trampoline with access
// check. Uses an ArrayList and a plain Object: contains() must be false before
// add() and true afterwards.
TEST_F(StubTest, DISABLED_IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod = env->GetMethodID(
      arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get representation.
  ArtMethod* contains_amethod = jni::DecodeArtMethod(contains_jmethod);

  // Patch up ArrayList.contains.
  // If there is no compiled code yet, point the entrypoint at the
  // to-interpreter bridge so the stub has something to jump to (see FIXME above).
  if (contains_amethod->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
  }

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod = env->GetMethodID(
      list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get mirror representation.
  ArtMethod* inf_contains = jni::DecodeArtMethod(inf_contains_jmethod);

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Create instances.

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object>(jobj)));

  // Invocation tests.

  // 1. imt_conflict

  // Contains.

  // We construct the ImtConflictTable ourselves, as we cannot go into the runtime stub
  // that will create it: the runtime stub expects to be called by compiled code.
  LinearAlloc* linear_alloc = Runtime::Current()->GetLinearAlloc();
  ArtMethod* conflict_method = Runtime::Current()->CreateImtConflictMethod(linear_alloc);
  ImtConflictTable* empty_conflict_table =
      Runtime::Current()->GetClassLinker()->CreateImtConflictTable(/*count*/0u, linear_alloc);
  // Grow the empty table by one entry: interface method -> implementation method.
  void* data = linear_alloc->Alloc(
      self,
      ImtConflictTable::ComputeSizeWithOneMoreEntry(empty_conflict_table, kRuntimePointerSize));
  ImtConflictTable* new_table = new (data) ImtConflictTable(
      empty_conflict_table, inf_contains, contains_amethod, kRuntimePointerSize);
  conflict_method->SetImtConflictTable(new_table, kRuntimePointerSize);

  // Empty list: contains(obj) must report JNI_FALSE.
  size_t result =
      Invoke3WithReferrerAndHidden(reinterpret_cast<size_t>(conflict_method),
                                   reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self,
                                   contains_amethod,
                                   static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << mirror::Object::PrettyTypeOf(self->GetException());

  // Contains.

  // After add(): contains(obj) must now report JNI_TRUE through the same path.
  result =
      Invoke3WithReferrerAndHidden(reinterpret_cast<size_t>(conflict_method),
                                   reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self,
                                   contains_amethod,
                                   static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // 2. regular interface trampoline

  // contains(obj) via invoke-interface: still present -> JNI_TRUE.
  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(obj.Get()),
                               StubTest::GetEntrypoint(self,
                                   kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // contains(array_list) — the list does not contain itself -> JNI_FALSE.
  result = Invoke3WithReferrer(
      static_cast<size_t>(inf_contains->GetDexMethodIndex()),
      reinterpret_cast<size_t>(array_list.Get()), reinterpret_cast<size_t>(array_list.Get()),
      StubTest::GetEntrypoint(self, kQuickInvokeInterfaceTrampolineWithAccessCheck), self,
      contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
2124
Andreas Gampe6aac3552014-06-09 14:55:53 -07002125TEST_F(StubTest, StringIndexOf) {
Chris Larsencf283da2016-01-19 16:45:35 -08002126#if defined(__arm__) || defined(__aarch64__) || defined(__mips__)
Andreas Gampe6aac3552014-06-09 14:55:53 -07002127 Thread* self = Thread::Current();
2128 ScopedObjectAccess soa(self);
2129 // garbage is created during ClassLinker::Init
2130
2131 // Create some strings
2132 // Use array so we can index into it and use a matrix for expected results
2133 // Setup: The first half is standard. The second half uses a non-zero offset.
2134 // TODO: Shared backing arrays.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002135 const char* c_str[] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
2136 static constexpr size_t kStringCount = arraysize(c_str);
2137 const char c_char[] = { 'a', 'b', 'c', 'd', 'e' };
2138 static constexpr size_t kCharCount = arraysize(c_char);
Andreas Gampe6aac3552014-06-09 14:55:53 -07002139
2140 StackHandleScope<kStringCount> hs(self);
2141 Handle<mirror::String> s[kStringCount];
2142
2143 for (size_t i = 0; i < kStringCount; ++i) {
2144 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
2145 }
2146
2147 // Matrix of expectations. First component is first parameter. Note we only check against the
2148 // sign, not the value. As we are testing random offsets, we need to compute this and need to
2149 // rely on String::CompareTo being correct.
2150 static constexpr size_t kMaxLen = 9;
2151 DCHECK_LE(strlen(c_str[kStringCount-1]), kMaxLen) << "Please fix the indexof test.";
2152
2153 // Last dimension: start, offset by 1.
2154 int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
2155 for (size_t x = 0; x < kStringCount; ++x) {
2156 for (size_t y = 0; y < kCharCount; ++y) {
2157 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2158 expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
2159 }
2160 }
2161 }
2162
2163 // Play with it...
2164
2165 for (size_t x = 0; x < kStringCount; ++x) {
2166 for (size_t y = 0; y < kCharCount; ++y) {
2167 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2168 int32_t start = static_cast<int32_t>(z) - 1;
2169
2170 // Test string_compareto x y
2171 size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
Andreas Gampe29b38412014-08-13 00:15:43 -07002172 StubTest::GetEntrypoint(self, kQuickIndexOf), self);
Andreas Gampe6aac3552014-06-09 14:55:53 -07002173
2174 EXPECT_FALSE(self->IsExceptionPending());
2175
2176 // The result is a 32b signed integer
2177 union {
2178 size_t r;
2179 int32_t i;
2180 } conv;
2181 conv.r = result;
2182
2183 EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
2184 c_char[y] << " @ " << start;
2185 }
2186 }
2187 }
2188
2189 // TODO: Deallocate things.
2190
2191 // Tests done.
2192#else
2193 LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
2194 // Force-print to std::cout so it's also outside the logcat.
2195 std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe51f76352014-05-21 08:28:48 -07002196#endif
2197}
2198
Roland Levillain02b75802016-07-13 11:54:35 +01002199// TODO: Exercise the ReadBarrierMarkRegX entry points.
2200
Man Cao1aee9002015-07-14 22:31:42 -07002201TEST_F(StubTest, ReadBarrier) {
2202#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
2203 defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
2204 Thread* self = Thread::Current();
2205
2206 const uintptr_t readBarrierSlow = StubTest::GetEntrypoint(self, kQuickReadBarrierSlow);
2207
2208 // Create an object
2209 ScopedObjectAccess soa(self);
2210 // garbage is created during ClassLinker::Init
2211
2212 StackHandleScope<2> hs(soa.Self());
2213 Handle<mirror::Class> c(
2214 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
2215
2216 // Build an object instance
2217 Handle<mirror::Object> obj(hs.NewHandle(c->AllocObject(soa.Self())));
2218
2219 EXPECT_FALSE(self->IsExceptionPending());
2220
2221 size_t result = Invoke3(0U, reinterpret_cast<size_t>(obj.Get()),
2222 mirror::Object::ClassOffset().SizeValue(), readBarrierSlow, self);
2223
2224 EXPECT_FALSE(self->IsExceptionPending());
2225 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
2226 mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
2227 EXPECT_EQ(klass, obj->GetClass());
2228
2229 // Tests done.
2230#else
2231 LOG(INFO) << "Skipping read_barrier_slow";
2232 // Force-print to std::cout so it's also outside the logcat.
2233 std::cout << "Skipping read_barrier_slow" << std::endl;
2234#endif
2235}
2236
Roland Levillain0d5a2812015-11-13 10:07:31 +00002237TEST_F(StubTest, ReadBarrierForRoot) {
2238#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
2239 defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
2240 Thread* self = Thread::Current();
2241
2242 const uintptr_t readBarrierForRootSlow =
2243 StubTest::GetEntrypoint(self, kQuickReadBarrierForRootSlow);
2244
2245 // Create an object
2246 ScopedObjectAccess soa(self);
2247 // garbage is created during ClassLinker::Init
2248
2249 StackHandleScope<1> hs(soa.Self());
2250
2251 Handle<mirror::String> obj(
2252 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
2253
2254 EXPECT_FALSE(self->IsExceptionPending());
2255
2256 GcRoot<mirror::Class>& root = mirror::String::java_lang_String_;
2257 size_t result = Invoke3(reinterpret_cast<size_t>(&root), 0U, 0U, readBarrierForRootSlow, self);
2258
2259 EXPECT_FALSE(self->IsExceptionPending());
2260 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
2261 mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
2262 EXPECT_EQ(klass, obj->GetClass());
2263
2264 // Tests done.
2265#else
2266 LOG(INFO) << "Skipping read_barrier_for_root_slow";
2267 // Force-print to std::cout so it's also outside the logcat.
2268 std::cout << "Skipping read_barrier_for_root_slow" << std::endl;
2269#endif
2270}
2271
Andreas Gampe525cde22014-04-22 15:44:50 -07002272} // namespace art