blob: 243b9d14b23d1c0bb672bbb889e1f738c9afe495 [file] [log] [blame]
Andreas Gampe525cde22014-04-22 15:44:50 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Ian Rogerse63db272014-07-15 15:36:11 -070017#include <cstdio>
18
Mathieu Chartierc7853442015-03-27 14:35:38 -070019#include "art_field-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070020#include "art_method-inl.h"
Andreas Gampe542451c2016-07-26 09:02:02 -070021#include "base/enums.h"
Vladimir Marko3481ba22015-04-13 12:22:36 +010022#include "class_linker-inl.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070023#include "common_runtime_test.h"
Andreas Gampe29b38412014-08-13 00:15:43 -070024#include "entrypoints/quick/quick_entrypoints_enum.h"
Andreas Gampe75a7db62016-09-26 12:04:26 -070025#include "imt_conflict_table.h"
Nicolas Geoffray1004faa2016-03-23 14:28:30 +000026#include "linear_alloc.h"
Andreas Gampe51f76352014-05-21 08:28:48 -070027#include "mirror/class-inl.h"
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -070028#include "mirror/string-inl.h"
Mathieu Chartier0795f232016-09-27 18:43:30 -070029#include "scoped_thread_state_change-inl.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070030
31namespace art {
32
33
// Test fixture for exercising the quick-code assembly stubs directly.
// It provides Invoke3* trampolines that call a stub entrypoint with up to three
// arguments (plus an optional referrer method and "hidden" argument) using
// hand-written inline assembly per architecture, emulating the quick ABI.
class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      // Install a callee-save method for every save type that is still missing.
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(), type);
        }
      }
    }
  }

  // Shrink the heap ("-Xmx4M") and force interpreter mode ("-Xint") so the
  // runtime under test stays small and no compiled code interferes.
  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  // Invoke a stub with three arguments and no referrer.
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // Invoke a stub with three arguments and a referrer method (hidden arg = 0).
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             ArtMethod* referrer) {
    return Invoke3WithReferrerAndHidden(arg0, arg1, arg2, code, self, referrer, 0);
  }

  // Core trampoline: calls `code` with arg0-arg2 in the quick ABI argument
  // registers, `referrer` spilled where a quick frame expects the method, and
  // `hidden` in the architecture's hidden-argument register. Returns the
  // stub's result register. On aarch64 it additionally verifies that d8-d15
  // (callee-save FP registers) survive the call; the outcome lands in
  // fp_result and is checked at the bottom of this function.
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
#define PUSH(reg) "push " # reg "\n\t .cfi_adjust_cfa_offset 4\n\t"
#define POP(reg) "pop " # reg "\n\t .cfi_adjust_cfa_offset -4\n\t"
    __asm__ __volatile__(
        "movd %[hidden], %%xmm7\n\t"  // This is a memory op, so do this early. If it is off of
                                      // esp, then we won't be able to access it after spilling.

        // Spill 6 registers.
        PUSH(%%ebx)
        PUSH(%%ecx)
        PUSH(%%edx)
        PUSH(%%esi)
        PUSH(%%edi)
        PUSH(%%ebp)

        // Store the inputs to the stack, but keep the referrer up top, less work.
        PUSH(%[referrer])     // Align stack.
        PUSH(%[referrer])     // Store referrer

        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[code])
        // Now read them back into the required registers.
        POP(%%edi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%eax)
        // Call is prepared now.

        "call *%%edi\n\t"           // Call the stub
        "addl $8, %%esp\n\t"        // Pop referrer and padding.
        ".cfi_adjust_cfa_offset -8\n\t"

        // Restore 6 registers.
        POP(%%ebp)
        POP(%%edi)
        POP(%%esi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%ebx)

        : "=a" (result)
          // Use the result from eax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer]"r"(referrer), [hidden]"m"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory", "xmm7");  // clobber.
#undef PUSH
#undef POP
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"    // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop null and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "r0", "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #80\n\t"
        ".cfi_adjust_cfa_offset 80\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"
        // To be extra defensive, store x20. We do this because some of the stubs might make a
        // transition into the runtime via the blr instruction below and *not* save x20.
        "str x20, [sp, #64]\n\t"
        // 8 byte buffer

        "sub sp, sp, #16\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"    // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x19, x17, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"              // Call the stub
        "mov x8, x0\n\t"          // Store result
        "add sp, sp, #16\n\t"     // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"          // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"    // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "ldr x20, [sp, #64]\n\t"
        "add sp, sp, #80\n\t"     // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -80\n\t"

        "str x9, %[fpr_result]\n\t"  // Store the FPR comparison result
        "mov %[result], x8\n\t"      // Store the call result

        "b 3f\n\t"                // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
          // Leave one register unclobbered, which is needed for compiling with
          // -fstack-protector-strong. According to AAPCS64 registers x9-x15 are caller-saved,
          // which means we should unclobber one of the callee-saved registers that are unused.
          // Here we use x20.
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");
#elif defined(__mips__) && !defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a3 and t0-t7 which we say we don't clobber. May contain args.
        "addiu $sp, $sp, -64\n\t"
        "sw $a0, 0($sp)\n\t"
        "sw $a1, 4($sp)\n\t"
        "sw $a2, 8($sp)\n\t"
        "sw $a3, 12($sp)\n\t"
        "sw $t0, 16($sp)\n\t"
        "sw $t1, 20($sp)\n\t"
        "sw $t2, 24($sp)\n\t"
        "sw $t3, 28($sp)\n\t"
        "sw $t4, 32($sp)\n\t"
        "sw $t5, 36($sp)\n\t"
        "sw $t6, 40($sp)\n\t"
        "sw $t7, 44($sp)\n\t"
        // Spill gp register since it is caller save.
        "sw $gp, 52($sp)\n\t"

        "addiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sw %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "addiu $sp, $sp, -24\n\t"
        "sw %[arg0], 0($sp)\n\t"
        "sw %[arg1], 4($sp)\n\t"
        "sw %[arg2], 8($sp)\n\t"
        "sw %[code], 12($sp)\n\t"
        "sw %[self], 16($sp)\n\t"
        "sw %[hidden], 20($sp)\n\t"

        // Load call params into the right registers.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $t9, 12($sp)\n\t"
        "lw $s1, 16($sp)\n\t"
        "lw $t0, 20($sp)\n\t"
        "addiu $sp, $sp, 24\n\t"

        "jalr $t9\n\t"             // Call the stub.
        "nop\n\t"                  // Branch delay slot.
        "addiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $a3, 12($sp)\n\t"
        "lw $t0, 16($sp)\n\t"
        "lw $t1, 20($sp)\n\t"
        "lw $t2, 24($sp)\n\t"
        "lw $t3, 28($sp)\n\t"
        "lw $t4, 32($sp)\n\t"
        "lw $t5, 36($sp)\n\t"
        "lw $t6, 40($sp)\n\t"
        "lw $t7, 44($sp)\n\t"
        // Restore gp.
        "lw $gp, 52($sp)\n\t"
        "addiu $sp, $sp, 64\n\t"   // Free stack space, now sp as on entry.

        "move %[result], $v0\n\t"  // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "at", "v0", "v1", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "t8", "t9", "k0", "k1",
          "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__mips__) && defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a7 which we say we don't clobber. May contain args.
        "daddiu $sp, $sp, -64\n\t"
        "sd $a0, 0($sp)\n\t"
        "sd $a1, 8($sp)\n\t"
        "sd $a2, 16($sp)\n\t"
        "sd $a3, 24($sp)\n\t"
        "sd $a4, 32($sp)\n\t"
        "sd $a5, 40($sp)\n\t"
        "sd $a6, 48($sp)\n\t"
        "sd $a7, 56($sp)\n\t"

        "daddiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sd %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "daddiu $sp, $sp, -48\n\t"
        "sd %[arg0], 0($sp)\n\t"
        "sd %[arg1], 8($sp)\n\t"
        "sd %[arg2], 16($sp)\n\t"
        "sd %[code], 24($sp)\n\t"
        "sd %[self], 32($sp)\n\t"
        "sd %[hidden], 40($sp)\n\t"

        // Load call params into the right registers.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $t9, 24($sp)\n\t"
        "ld $s1, 32($sp)\n\t"
        "ld $t0, 40($sp)\n\t"
        "daddiu $sp, $sp, 48\n\t"

        "jalr $t9\n\t"              // Call the stub.
        "nop\n\t"                   // Branch delay slot.
        "daddiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $a3, 24($sp)\n\t"
        "ld $a4, 32($sp)\n\t"
        "ld $a5, 40($sp)\n\t"
        "ld $a6, 48($sp)\n\t"
        "ld $a7, 56($sp)\n\t"
        "daddiu $sp, $sp, 64\n\t"

        "move %[result], $v0\n\t"   // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        // Instead of the aliases t0-t3, register names $12-$15 have been used in the clobber list
        // because t0-t3 are ambiguous.
        : "at", "v0", "v1", "$12", "$13", "$14", "$15", "s0", "s1", "s2", "s3", "s4", "s5", "s6",
          "s7", "t8", "t9", "k0", "k1", "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__)
#define PUSH(reg) "pushq " # reg "\n\t .cfi_adjust_cfa_offset 8\n\t"
#define POP(reg) "popq " # reg "\n\t .cfi_adjust_cfa_offset -8\n\t"
    // Note: Uses the native convention. We do a callee-save regimen by manually spilling and
    // restoring almost all registers.
    // TODO: Set the thread?
    __asm__ __volatile__(
        // Spill almost everything (except rax, rsp). 14 registers.
        PUSH(%%rbx)
        PUSH(%%rcx)
        PUSH(%%rdx)
        PUSH(%%rsi)
        PUSH(%%rdi)
        PUSH(%%rbp)
        PUSH(%%r8)
        PUSH(%%r9)
        PUSH(%%r10)
        PUSH(%%r11)
        PUSH(%%r12)
        PUSH(%%r13)
        PUSH(%%r14)
        PUSH(%%r15)

        PUSH(%[referrer])              // Push referrer & 16B alignment padding
        PUSH(%[referrer])

        // Now juggle the input registers.
        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[hidden])
        PUSH(%[code])
        POP(%%r8)
        POP(%%rax)
        POP(%%rdx)
        POP(%%rsi)
        POP(%%rdi)

        "call *%%r8\n\t"                  // Call the stub
        "addq $16, %%rsp\n\t"             // Pop null and padding
        ".cfi_adjust_cfa_offset -16\n\t"

        POP(%%r15)
        POP(%%r14)
        POP(%%r13)
        POP(%%r12)
        POP(%%r11)
        POP(%%r10)
        POP(%%r9)
        POP(%%r8)
        POP(%%rbp)
        POP(%%rdi)
        POP(%%rsi)
        POP(%%rdx)
        POP(%%rcx)
        POP(%%rbx)

        : "=a" (result)
          // Use the result from rax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into some other
          // register. We can't use "b" (rbx), as ASAN uses this for the frame pointer.
        : "memory");  // We spill and restore (almost) all registers, so only mention memory here.
#undef PUSH
#undef POP
#else
    UNUSED(arg0, arg1, arg2, code, referrer, hidden);
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    // Publish the FPR verification outcome (always 0 on architectures that do
    // not perform the d8-d15 check) and fail the test if a callee-save FP
    // register was clobbered by the stub.
    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Read the address of a quick entrypoint out of `self`'s entrypoint table.
  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
    int32_t offset;
    offset = GetThreadOffset<kRuntimePointerSize>(entrypoint).Int32Value();
    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
  }

 protected:
  // Result of the aarch64 callee-save FPR check; 0 means d8-d15 survived.
  size_t fp_result;
};
541
542
Andreas Gampe525cde22014-04-22 15:44:50 -0700543TEST_F(StubTest, Memcpy) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200544#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__)) || defined(__mips__)
Andreas Gampe525cde22014-04-22 15:44:50 -0700545 Thread* self = Thread::Current();
546
547 uint32_t orig[20];
548 uint32_t trg[20];
549 for (size_t i = 0; i < 20; ++i) {
550 orig[i] = i;
551 trg[i] = 0;
552 }
553
554 Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
Andreas Gampe29b38412014-08-13 00:15:43 -0700555 10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700556
557 EXPECT_EQ(orig[0], trg[0]);
558
559 for (size_t i = 1; i < 4; ++i) {
560 EXPECT_NE(orig[i], trg[i]);
561 }
562
563 for (size_t i = 4; i < 14; ++i) {
564 EXPECT_EQ(orig[i], trg[i]);
565 }
566
567 for (size_t i = 14; i < 20; ++i) {
568 EXPECT_NE(orig[i], trg[i]);
569 }
570
571 // TODO: Test overlapping?
572
573#else
574 LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
575 // Force-print to std::cout so it's also outside the logcat.
576 std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
577#endif
578}
579
Andreas Gampe525cde22014-04-22 15:44:50 -0700580TEST_F(StubTest, LockObject) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200581#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
582 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700583 static constexpr size_t kThinLockLoops = 100;
584
Andreas Gampe525cde22014-04-22 15:44:50 -0700585 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700586
587 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
588
Andreas Gampe525cde22014-04-22 15:44:50 -0700589 // Create an object
590 ScopedObjectAccess soa(self);
591 // garbage is created during ClassLinker::Init
592
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700593 StackHandleScope<2> hs(soa.Self());
594 Handle<mirror::String> obj(
595 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700596 LockWord lock = obj->GetLockWord(false);
597 LockWord::LockState old_state = lock.GetState();
598 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
599
Andreas Gampe29b38412014-08-13 00:15:43 -0700600 Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700601
602 LockWord lock_after = obj->GetLockWord(false);
603 LockWord::LockState new_state = lock_after.GetState();
604 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700605 EXPECT_EQ(lock_after.ThinLockCount(), 0U); // Thin lock starts count at zero
606
607 for (size_t i = 1; i < kThinLockLoops; ++i) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700608 Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700609
610 // Check we're at lock count i
611
612 LockWord l_inc = obj->GetLockWord(false);
613 LockWord::LockState l_inc_state = l_inc.GetState();
614 EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
615 EXPECT_EQ(l_inc.ThinLockCount(), i);
616 }
617
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700618 // Force a fat lock by running identity hashcode to fill up lock word.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700619 Handle<mirror::String> obj2(hs.NewHandle(
620 mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700621
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700622 obj2->IdentityHashCode();
623
Andreas Gampe29b38412014-08-13 00:15:43 -0700624 Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700625
626 LockWord lock_after2 = obj2->GetLockWord(false);
627 LockWord::LockState new_state2 = lock_after2.GetState();
628 EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
629 EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));
630
631 // Test done.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700632#else
633 LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
634 // Force-print to std::cout so it's also outside the logcat.
635 std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
636#endif
637}
638
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700639
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700640class RandGen {
641 public:
642 explicit RandGen(uint32_t seed) : val_(seed) {}
643
644 uint32_t next() {
645 val_ = val_ * 48271 % 2147483647 + 13;
646 return val_;
647 }
648
649 uint32_t val_;
650};
651
652
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700653// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
654static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200655#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
656 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700657 static constexpr size_t kThinLockLoops = 100;
658
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700659 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700660
661 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
662 const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700663 // Create an object
664 ScopedObjectAccess soa(self);
665 // garbage is created during ClassLinker::Init
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700666 static constexpr size_t kNumberOfLocks = 10; // Number of objects = lock
667 StackHandleScope<kNumberOfLocks + 1> hs(self);
668 Handle<mirror::String> obj(
669 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700670 LockWord lock = obj->GetLockWord(false);
671 LockWord::LockState old_state = lock.GetState();
672 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
673
Andreas Gampe29b38412014-08-13 00:15:43 -0700674 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700675 // This should be an illegal monitor state.
676 EXPECT_TRUE(self->IsExceptionPending());
677 self->ClearException();
678
679 LockWord lock_after = obj->GetLockWord(false);
680 LockWord::LockState new_state = lock_after.GetState();
681 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);
Andreas Gampe525cde22014-04-22 15:44:50 -0700682
Andreas Gampe29b38412014-08-13 00:15:43 -0700683 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700684
685 LockWord lock_after2 = obj->GetLockWord(false);
686 LockWord::LockState new_state2 = lock_after2.GetState();
687 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);
688
Andreas Gampe29b38412014-08-13 00:15:43 -0700689 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700690
691 LockWord lock_after3 = obj->GetLockWord(false);
692 LockWord::LockState new_state3 = lock_after3.GetState();
693 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);
694
695 // Stress test:
696 // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
697 // each step.
698
699 RandGen r(0x1234);
700
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700701 constexpr size_t kIterations = 10000; // Number of iterations
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700702 constexpr size_t kMoveToFat = 1000; // Chance of 1:kMoveFat to make a lock fat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700703
704 size_t counts[kNumberOfLocks];
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700705 bool fat[kNumberOfLocks]; // Whether a lock should be thin or fat.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700706 Handle<mirror::String> objects[kNumberOfLocks];
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700707
708 // Initialize = allocate.
709 for (size_t i = 0; i < kNumberOfLocks; ++i) {
710 counts[i] = 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700711 fat[i] = false;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700712 objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700713 }
714
715 for (size_t i = 0; i < kIterations; ++i) {
716 // Select which lock to update.
717 size_t index = r.next() % kNumberOfLocks;
718
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700719 // Make lock fat?
720 if (!fat[index] && (r.next() % kMoveToFat == 0)) {
721 fat[index] = true;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700722 objects[index]->IdentityHashCode();
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700723
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700724 LockWord lock_iter = objects[index]->GetLockWord(false);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700725 LockWord::LockState iter_state = lock_iter.GetState();
726 if (counts[index] == 0) {
727 EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
728 } else {
729 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
730 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700731 } else {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800732 bool take_lock; // Whether to lock or unlock in this step.
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700733 if (counts[index] == 0) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800734 take_lock = true;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700735 } else if (counts[index] == kThinLockLoops) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800736 take_lock = false;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700737 } else {
738 // Randomly.
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800739 take_lock = r.next() % 2 == 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700740 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700741
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800742 if (take_lock) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700743 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
744 self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700745 counts[index]++;
746 } else {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700747 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700748 art_quick_unlock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700749 counts[index]--;
750 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700751
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700752 EXPECT_FALSE(self->IsExceptionPending());
753
754 // Check the new state.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700755 LockWord lock_iter = objects[index]->GetLockWord(true);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700756 LockWord::LockState iter_state = lock_iter.GetState();
757 if (fat[index]) {
758 // Abuse MonitorInfo.
759 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700760 MonitorInfo info(objects[index].Get());
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700761 EXPECT_EQ(counts[index], info.entry_count_) << index;
762 } else {
763 if (counts[index] > 0) {
764 EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
765 EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
766 } else {
767 EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
768 }
769 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700770 }
771 }
772
773 // Unlock the remaining count times and then check it's unlocked. Then deallocate.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700774 // Go reverse order to correctly handle Handles.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700775 for (size_t i = 0; i < kNumberOfLocks; ++i) {
776 size_t index = kNumberOfLocks - 1 - i;
777 size_t count = counts[index];
778 while (count > 0) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700779 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
780 self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700781 count--;
782 }
783
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700784 LockWord lock_after4 = objects[index]->GetLockWord(false);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700785 LockWord::LockState new_state4 = lock_after4.GetState();
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700786 EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
787 || LockWord::LockState::kFatLocked == new_state4);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700788 }
789
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700790 // Test done.
Andreas Gampe525cde22014-04-22 15:44:50 -0700791#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -0800792 UNUSED(test);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700793 LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
Andreas Gampe525cde22014-04-22 15:44:50 -0700794 // Force-print to std::cout so it's also outside the logcat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700795 std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe525cde22014-04-22 15:44:50 -0700796#endif
797}
798
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700799TEST_F(StubTest, UnlockObject) {
Andreas Gampe369810a2015-01-14 19:53:31 -0800800 // This will lead to monitor error messages in the log.
801 ScopedLogSeverity sls(LogSeverity::FATAL);
802
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700803 TestUnlockObject(this);
804}
Andreas Gampe525cde22014-04-22 15:44:50 -0700805
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200806#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
807 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800808extern "C" void art_quick_check_instance_of(void);
Andreas Gampe525cde22014-04-22 15:44:50 -0700809#endif
810
// Exercises the check-instance-of entrypoint: the stub must return quietly for
// valid assignments and leave an exception pending for invalid ones.
TEST_F(StubTest, CheckCast) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  // Entrypoint under test (shadows the extern "C" declaration above).
  const uintptr_t art_quick_check_instance_of =
      StubTest::GetEntrypoint(self, kQuickCheckInstanceOf);

  // Find some classes.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Variable-sized scope: we allocate eight handles below.
  VariableSizedHandleScope hs(soa.Self());
  Handle<mirror::Class> klass_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
  Handle<mirror::Class> klass_str(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/String;")));
  Handle<mirror::Class> klass_list(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/util/List;")));
  Handle<mirror::Class> klass_cloneable(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Cloneable;")));
  Handle<mirror::Class> klass_array_list(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/util/ArrayList;")));
  Handle<mirror::Object> obj(hs.NewHandle(klass_obj->AllocObject(soa.Self())));
  Handle<mirror::String> string(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "ABCD")));
  Handle<mirror::Object> array_list(hs.NewHandle(klass_array_list->AllocObject(soa.Self())));

  EXPECT_FALSE(self->IsExceptionPending());

  // Expected true: plain Object instance of java.lang.Object.
  Invoke3(reinterpret_cast<size_t>(obj.Get()),
          reinterpret_cast<size_t>(klass_obj.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  // Expected true: Test string instance of java.lang.String.
  Invoke3(reinterpret_cast<size_t>(string.Get()),
          reinterpret_cast<size_t>(klass_str.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  // Expected true: Test string instance of java.lang.Object.
  Invoke3(reinterpret_cast<size_t>(string.Get()),
          reinterpret_cast<size_t>(klass_obj.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  // Expected false: Test object instance of java.lang.String.
  Invoke3(reinterpret_cast<size_t>(obj.Get()),
          reinterpret_cast<size_t>(klass_str.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // Expected true: ArrayList instance of the java.util.List interface.
  Invoke3(reinterpret_cast<size_t>(array_list.Get()),
          reinterpret_cast<size_t>(klass_list.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  // Expected true: ArrayList instance of java.lang.Cloneable.
  Invoke3(reinterpret_cast<size_t>(array_list.Get()),
          reinterpret_cast<size_t>(klass_cloneable.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_FALSE(self->IsExceptionPending());

  // Expected false: String instance of java.util.ArrayList.
  Invoke3(reinterpret_cast<size_t>(string.Get()),
          reinterpret_cast<size_t>(klass_array_list.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // Expected false: String instance of java.lang.Cloneable.
  Invoke3(reinterpret_cast<size_t>(string.Get()),
          reinterpret_cast<size_t>(klass_cloneable.Get()),
          0U,
          art_quick_check_instance_of,
          self);
  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

#else
  LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
909
910
Andreas Gampe525cde22014-04-22 15:44:50 -0700911TEST_F(StubTest, APutObj) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200912#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
913 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700914 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700915
916 // Do not check non-checked ones, we'd need handlers and stuff...
917 const uintptr_t art_quick_aput_obj_with_null_and_bound_check =
918 StubTest::GetEntrypoint(self, kQuickAputObjectWithNullAndBoundCheck);
919
Andreas Gampe525cde22014-04-22 15:44:50 -0700920 // Create an object
921 ScopedObjectAccess soa(self);
922 // garbage is created during ClassLinker::Init
923
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700924 StackHandleScope<5> hs(soa.Self());
925 Handle<mirror::Class> c(
926 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
927 Handle<mirror::Class> ca(
928 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700929
930 // Build a string array of size 1
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700931 Handle<mirror::ObjectArray<mirror::Object>> array(
932 hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));
Andreas Gampe525cde22014-04-22 15:44:50 -0700933
934 // Build a string -> should be assignable
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700935 Handle<mirror::String> str_obj(
936 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700937
938 // Build a generic object -> should fail assigning
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700939 Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));
Andreas Gampe525cde22014-04-22 15:44:50 -0700940
941 // Play with it...
942
943 // 1) Success cases
Andreas Gampef4e910b2014-04-29 16:55:52 -0700944 // 1.1) Assign str_obj to array[0..3]
Andreas Gampe525cde22014-04-22 15:44:50 -0700945
946 EXPECT_FALSE(self->IsExceptionPending());
947
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700948 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700949 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700950
951 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700952 EXPECT_EQ(str_obj.Get(), array->Get(0));
Andreas Gampe525cde22014-04-22 15:44:50 -0700953
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700954 Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700955 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700956
957 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700958 EXPECT_EQ(str_obj.Get(), array->Get(1));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700959
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700960 Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700961 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700962
963 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700964 EXPECT_EQ(str_obj.Get(), array->Get(2));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700965
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700966 Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700967 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700968
969 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700970 EXPECT_EQ(str_obj.Get(), array->Get(3));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700971
972 // 1.2) Assign null to array[0..3]
Andreas Gampe525cde22014-04-22 15:44:50 -0700973
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700974 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700975 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700976
977 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampef4e910b2014-04-29 16:55:52 -0700978 EXPECT_EQ(nullptr, array->Get(0));
979
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700980 Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700981 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700982
983 EXPECT_FALSE(self->IsExceptionPending());
984 EXPECT_EQ(nullptr, array->Get(1));
985
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700986 Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700987 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700988
989 EXPECT_FALSE(self->IsExceptionPending());
990 EXPECT_EQ(nullptr, array->Get(2));
991
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700992 Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700993 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700994
995 EXPECT_FALSE(self->IsExceptionPending());
996 EXPECT_EQ(nullptr, array->Get(3));
Andreas Gampe525cde22014-04-22 15:44:50 -0700997
998 // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.
999
1000 // 2) Failure cases (str into str[])
1001 // 2.1) Array = null
1002 // TODO: Throwing NPE needs actual DEX code
1003
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001004// Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe525cde22014-04-22 15:44:50 -07001005// reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
1006//
1007// EXPECT_TRUE(self->IsExceptionPending());
1008// self->ClearException();
1009
1010 // 2.2) Index < 0
1011
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001012 Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
1013 reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -07001014 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -07001015
1016 EXPECT_TRUE(self->IsExceptionPending());
1017 self->ClearException();
1018
1019 // 2.3) Index > 0
1020
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001021 Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -07001022 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -07001023
1024 EXPECT_TRUE(self->IsExceptionPending());
1025 self->ClearException();
1026
1027 // 3) Failure cases (obj into str[])
1028
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001029 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -07001030 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -07001031
1032 EXPECT_TRUE(self->IsExceptionPending());
1033 self->ClearException();
1034
1035 // Tests done.
1036#else
1037 LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
1038 // Force-print to std::cout so it's also outside the logcat.
1039 std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
1040#endif
1041}
1042
// Exercises the object-allocation entrypoints (plain, resolved, initialized),
// then fills the heap to verify the stub reports OOM via a pending exception.
TEST_F(StubTest, AllocObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());  // Two handles: c here, ca in the OOM block below.
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // kQuickAllocObject resolves via (type_idx, referrer method).
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            // arbitrary
                            reinterpret_cast<size_t>(c->GetVirtualMethod(0, kRuntimePointerSize)),
                            0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObject),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
    // Cap the heap's target footprint so we can exhaust it quickly.
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    // Heap-allocate the scope: a million handles would overflow the stack.
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K
    size_t length = 128 * KB / 4;
    while (length > 10) {
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h.Get() == nullptr) {
        // Allocation failed: clear the OOM and retry with a smaller size.
        self->ClearException();

        // Try a smaller length
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        // Keep the array alive so the space stays occupied.
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h.Get() != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    // The heap is now full: the stub must fail, returning null with an exception pending.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1167
// Exercises the array-allocation entrypoints: a successful resolved allocation
// of Object[10], and an intentionally oversized request that must OOM.
TEST_F(StubTest, AllocObjectArray) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(self);
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

  // Needed to have a linked method.
  Handle<mirror::Class> c_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());

  // For some reason this does not work, as the type_idx is artificial and outside what the
  // resolved types of c_obj allow...

  if ((false)) {  // Intentionally disabled, see comment above.
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(
        static_cast<size_t>(c->GetDexTypeIndex()),  // type_idx
        10U,
        // arbitrary
        reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0, kRuntimePointerSize)),
        StubTest::GetEntrypoint(self, kQuickAllocArray),
        self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    EXPECT_EQ(obj->GetLength(), 10);
  }

  {
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 10U,
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << mirror::Object::PrettyTypeOf(self->GetException());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory: request GB elements, which cannot be satisfied.
  {
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()),
                            GB,  // that should fail...
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1254
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001255
// Exercises the StringCompareTo entrypoint against String::CompareTo for every
// ordered pair of test strings, checking only the sign of the result.
TEST_F(StubTest, StringCompareTo) {
  TEST_DISABLED_FOR_STRING_COMPRESSION();
  // There is no StringCompareTo runtime entrypoint for __arm__ or __aarch64__.
#if defined(__i386__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();

  const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);

  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  const char* c[] = { "", "", "a", "aa", "ab",
      "aacaacaacaacaacaac",  // This one's under the default limit to go to __memcmp16.
      "aacaacaacaacaacaacaacaacaacaacaacaac",  // This one's over.
      "aacaacaacaacaacaacaacaacaacaacaacaaca" };  // As is this one. We need a separate one to
                                                  // defeat object-equal optimizations.
  static constexpr size_t kStringCount = arraysize(c);

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
  }

  // TODO: wide characters

  // Matrix of expectations. First component is first parameter. Note we only check against the
  // sign, not the value. As we are testing random offsets, we need to compute this and need to
  // rely on String::CompareTo being correct.
  int32_t expected[kStringCount][kStringCount];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      expected[x][y] = s[x]->CompareTo(s[y].Get());
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      // Test string_compareto x y
      size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
                              reinterpret_cast<size_t>(s[y].Get()), 0U,
                              art_quick_string_compareto, self);

      EXPECT_FALSE(self->IsExceptionPending());

      // The result is a 32b signed integer
      // NOTE(review): reading the inactive union member is the usual compiler-supported
      // type-punning idiom; it extracts the low 32 bits only on little-endian targets —
      // confirm if a big-endian target is ever added.
      union {
        size_t r;
        int32_t i;
      } conv;
      conv.r = result;
      int32_t e = expected[x][y];
      // Only the sign (or zero-ness) is checked, never the magnitude.
      EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
      std::endl;
#endif
}
1337
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001338
Mathieu Chartierc7853442015-03-27 14:35:38 -07001339static void GetSetBooleanStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001340 ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001341 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001342#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1343 (defined(__x86_64__) && !defined(__APPLE__))
Fred Shih37f05ef2014-07-16 18:38:08 -07001344 constexpr size_t num_values = 5;
1345 uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };
1346
1347 for (size_t i = 0; i < num_values; ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001348 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001349 static_cast<size_t>(values[i]),
1350 0U,
1351 StubTest::GetEntrypoint(self, kQuickSet8Static),
1352 self,
1353 referrer);
1354
Mathieu Chartierc7853442015-03-27 14:35:38 -07001355 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001356 0U, 0U,
1357 StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
1358 self,
1359 referrer);
1360 // Boolean currently stores bools as uint8_t, be more zealous about asserting correct writes/gets.
1361 EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
1362 }
1363#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001364 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001365 LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
1366 // Force-print to std::cout so it's also outside the logcat.
1367 std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1368#endif
1369}
Mathieu Chartiere401d142015-04-22 13:56:20 -07001370static void GetSetByteStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Mathieu Chartierc7853442015-03-27 14:35:38 -07001371 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001372 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001373#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1374 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001375 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001376
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001377 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001378 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001379 static_cast<size_t>(values[i]),
1380 0U,
1381 StubTest::GetEntrypoint(self, kQuickSet8Static),
1382 self,
1383 referrer);
1384
Mathieu Chartierc7853442015-03-27 14:35:38 -07001385 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001386 0U, 0U,
1387 StubTest::GetEntrypoint(self, kQuickGetByteStatic),
1388 self,
1389 referrer);
1390 EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
1391 }
1392#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001393 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001394 LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
1395 // Force-print to std::cout so it's also outside the logcat.
1396 std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1397#endif
1398}
1399
1400
Mathieu Chartierc7853442015-03-27 14:35:38 -07001401static void GetSetBooleanInstance(Handle<mirror::Object>* obj, ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001402 ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001403 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001404#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1405 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001406 uint8_t values[] = { 0, true, 2, 128, 0xFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001407
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001408 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001409 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001410 reinterpret_cast<size_t>(obj->Get()),
1411 static_cast<size_t>(values[i]),
1412 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1413 self,
1414 referrer);
1415
Mathieu Chartierc7853442015-03-27 14:35:38 -07001416 uint8_t res = f->GetBoolean(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001417 EXPECT_EQ(values[i], res) << "Iteration " << i;
1418
Mathieu Chartierc7853442015-03-27 14:35:38 -07001419 f->SetBoolean<false>(obj->Get(), res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001420
Mathieu Chartierc7853442015-03-27 14:35:38 -07001421 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001422 reinterpret_cast<size_t>(obj->Get()),
1423 0U,
1424 StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
1425 self,
1426 referrer);
1427 EXPECT_EQ(res, static_cast<uint8_t>(res2));
1428 }
1429#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001430 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001431 LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
1432 // Force-print to std::cout so it's also outside the logcat.
1433 std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1434#endif
1435}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001436static void GetSetByteInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001437 Thread* self, ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001438 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001439#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1440 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001441 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001442
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001443 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001444 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001445 reinterpret_cast<size_t>(obj->Get()),
1446 static_cast<size_t>(values[i]),
1447 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1448 self,
1449 referrer);
1450
Mathieu Chartierc7853442015-03-27 14:35:38 -07001451 int8_t res = f->GetByte(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001452 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001453 f->SetByte<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001454
Mathieu Chartierc7853442015-03-27 14:35:38 -07001455 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001456 reinterpret_cast<size_t>(obj->Get()),
1457 0U,
1458 StubTest::GetEntrypoint(self, kQuickGetByteInstance),
1459 self,
1460 referrer);
1461 EXPECT_EQ(res, static_cast<int8_t>(res2));
1462 }
1463#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001464 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001465 LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
1466 // Force-print to std::cout so it's also outside the logcat.
1467 std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1468#endif
1469}
1470
Mathieu Chartiere401d142015-04-22 13:56:20 -07001471static void GetSetCharStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001472 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001473 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001474#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1475 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001476 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001477
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001478 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001479 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001480 static_cast<size_t>(values[i]),
1481 0U,
1482 StubTest::GetEntrypoint(self, kQuickSet16Static),
1483 self,
1484 referrer);
1485
Mathieu Chartierc7853442015-03-27 14:35:38 -07001486 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001487 0U, 0U,
1488 StubTest::GetEntrypoint(self, kQuickGetCharStatic),
1489 self,
1490 referrer);
1491
1492 EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
1493 }
1494#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001495 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001496 LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
1497 // Force-print to std::cout so it's also outside the logcat.
1498 std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1499#endif
1500}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001501static void GetSetShortStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001502 ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001503 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001504#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1505 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001506 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001507
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001508 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001509 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001510 static_cast<size_t>(values[i]),
1511 0U,
1512 StubTest::GetEntrypoint(self, kQuickSet16Static),
1513 self,
1514 referrer);
1515
Mathieu Chartierc7853442015-03-27 14:35:38 -07001516 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001517 0U, 0U,
1518 StubTest::GetEntrypoint(self, kQuickGetShortStatic),
1519 self,
1520 referrer);
1521
1522 EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
1523 }
1524#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001525 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001526 LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
1527 // Force-print to std::cout so it's also outside the logcat.
1528 std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1529#endif
1530}
1531
Mathieu Chartierc7853442015-03-27 14:35:38 -07001532static void GetSetCharInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001533 Thread* self, ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001534 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001535#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1536 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001537 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001538
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001539 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001540 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001541 reinterpret_cast<size_t>(obj->Get()),
1542 static_cast<size_t>(values[i]),
1543 StubTest::GetEntrypoint(self, kQuickSet16Instance),
1544 self,
1545 referrer);
1546
Mathieu Chartierc7853442015-03-27 14:35:38 -07001547 uint16_t res = f->GetChar(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001548 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001549 f->SetChar<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001550
Mathieu Chartierc7853442015-03-27 14:35:38 -07001551 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001552 reinterpret_cast<size_t>(obj->Get()),
1553 0U,
1554 StubTest::GetEntrypoint(self, kQuickGetCharInstance),
1555 self,
1556 referrer);
1557 EXPECT_EQ(res, static_cast<uint16_t>(res2));
1558 }
1559#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001560 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001561 LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
1562 // Force-print to std::cout so it's also outside the logcat.
1563 std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1564#endif
1565}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001566static void GetSetShortInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001567 Thread* self, ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001568 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001569#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1570 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001571 int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
Fred Shih37f05ef2014-07-16 18:38:08 -07001572
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001573 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001574 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001575 reinterpret_cast<size_t>(obj->Get()),
1576 static_cast<size_t>(values[i]),
1577 StubTest::GetEntrypoint(self, kQuickSet16Instance),
1578 self,
1579 referrer);
1580
Mathieu Chartierc7853442015-03-27 14:35:38 -07001581 int16_t res = f->GetShort(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001582 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001583 f->SetShort<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001584
Mathieu Chartierc7853442015-03-27 14:35:38 -07001585 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001586 reinterpret_cast<size_t>(obj->Get()),
1587 0U,
1588 StubTest::GetEntrypoint(self, kQuickGetShortInstance),
1589 self,
1590 referrer);
1591 EXPECT_EQ(res, static_cast<int16_t>(res2));
1592 }
1593#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001594 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001595 LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
1596 // Force-print to std::cout so it's also outside the logcat.
1597 std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1598#endif
1599}
1600
// Round-trips 32-bit values through the Set32Static / Get32Static quick
// entrypoints and checks each value survives unchanged.
static void GetSet32Static(ArtField* f, Thread* self, ArtMethod* referrer,
                           StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write values[i] into the static field via the set stub.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet32Static),
                              self,
                              referrer);

    // Read it back via the get stub.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet32Static),
                                           self,
                                           referrer);

    // On mips64 only the low 32 bits of the returned size_t are compared —
    // presumably the stub's 32-bit result comes back widened (e.g.
    // sign-extended) in the 64-bit register; confirm against the mips64 stub.
#if defined(__mips__) && defined(__LP64__)
    EXPECT_EQ(static_cast<uint32_t>(res), values[i]) << "Iteration " << i;
#else
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
#endif
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1635
1636
Mathieu Chartierc7853442015-03-27 14:35:38 -07001637static void GetSet32Instance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001638 Thread* self, ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001639 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001640#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1641 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001642 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001643
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001644 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001645 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001646 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001647 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001648 StubTest::GetEntrypoint(self, kQuickSet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001649 self,
1650 referrer);
1651
Mathieu Chartierc7853442015-03-27 14:35:38 -07001652 int32_t res = f->GetInt(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001653 EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;
1654
1655 res++;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001656 f->SetInt<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001657
Mathieu Chartierc7853442015-03-27 14:35:38 -07001658 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001659 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001660 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001661 StubTest::GetEntrypoint(self, kQuickGet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001662 self,
1663 referrer);
1664 EXPECT_EQ(res, static_cast<int32_t>(res2));
1665 }
1666#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001667 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001668 LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
1669 // Force-print to std::cout so it's also outside the logcat.
1670 std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1671#endif
1672}
1673
1674
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001675#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1676 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001677
1678static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001679 ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001680 REQUIRES_SHARED(Locks::mutator_lock_) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001681 test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1682 reinterpret_cast<size_t>(val),
1683 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001684 StubTest::GetEntrypoint(self, kQuickSetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001685 self,
1686 referrer);
1687
1688 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1689 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001690 StubTest::GetEntrypoint(self, kQuickGetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001691 self,
1692 referrer);
1693
1694 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1695}
1696#endif
1697
Mathieu Chartiere401d142015-04-22 13:56:20 -07001698static void GetSetObjStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001699 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001700 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001701#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1702 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierc7853442015-03-27 14:35:38 -07001703 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001704
1705 // Allocate a string object for simplicity.
1706 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartierc7853442015-03-27 14:35:38 -07001707 set_and_check_static(f->GetDexFieldIndex(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001708
Mathieu Chartierc7853442015-03-27 14:35:38 -07001709 set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001710#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001711 UNUSED(f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001712 LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
1713 // Force-print to std::cout so it's also outside the logcat.
1714 std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
1715#endif
1716}
1717
1718
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001719#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1720 (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartierc7853442015-03-27 14:35:38 -07001721static void set_and_check_instance(ArtField* f, mirror::Object* trg,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001722 mirror::Object* val, Thread* self, ArtMethod* referrer,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001723 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001724 REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001725 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001726 reinterpret_cast<size_t>(trg),
1727 reinterpret_cast<size_t>(val),
Andreas Gampe29b38412014-08-13 00:15:43 -07001728 StubTest::GetEntrypoint(self, kQuickSetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001729 self,
1730 referrer);
1731
Mathieu Chartierc7853442015-03-27 14:35:38 -07001732 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001733 reinterpret_cast<size_t>(trg),
1734 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001735 StubTest::GetEntrypoint(self, kQuickGetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001736 self,
1737 referrer);
1738
1739 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1740
Mathieu Chartier3398c782016-09-30 10:27:43 -07001741 EXPECT_OBJ_PTR_EQ(val, f->GetObj(trg));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001742}
1743#endif
1744
// Round-trips object references through an instance reference field via
// set_and_check_instance: null, then a live string, then null again
// (covering both setting and clearing a reference).
static void GetSetObjInstance(Handle<mirror::Object>* obj, ArtField* f,
                              Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // NOTE(review): obj->Get() is deliberately re-read before each call —
  // presumably so the Handle tracks the object across any moving-GC activity
  // in between; confirm before hoisting it into a raw local.
  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_instance(f, obj->Get(), str, self, referrer, test);

  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1764
1765
Calin Juravle872ab3f2015-10-02 07:27:51 +01001766// TODO: Complete these tests for 32b architectures
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001767
// Round-trips 64-bit values through the Set64Static / Get64Static quick
// entrypoints. Only enabled on 64-bit targets (x86-64 non-Apple, mips64,
// arm64); see the TODO above about completing these tests for 32-bit
// architectures.
static void GetSet64Static(ArtField* f, Thread* self, ArtMethod* referrer,
                           StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) \
    || defined(__aarch64__)
  uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // 64 bit FieldSet stores the set value in the second register.
    // NOTE(review): accordingly the value is passed as the *third*
    // Invoke3WithReferrer argument with 0U in the second — presumably that is
    // how the arguments map onto the stub's registers; confirm against the
    // stub's calling convention before reordering.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              0U,
                              values[i],
                              StubTest::GetEntrypoint(self, kQuickSet64Static),
                              self,
                              referrer);

    // Read the field back through the get stub; on these 64-bit targets
    // size_t is wide enough to hold the full value.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet64Static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1799
1800
Mathieu Chartierc7853442015-03-27 14:35:38 -07001801static void GetSet64Instance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001802 Thread* self, ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001803 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001804#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) || \
1805 defined(__aarch64__)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001806 uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001807
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001808 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001809 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001810 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001811 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001812 StubTest::GetEntrypoint(self, kQuickSet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001813 self,
1814 referrer);
1815
Mathieu Chartierc7853442015-03-27 14:35:38 -07001816 int64_t res = f->GetLong(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001817 EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;
1818
1819 res++;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001820 f->SetLong<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001821
Mathieu Chartierc7853442015-03-27 14:35:38 -07001822 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001823 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001824 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001825 StubTest::GetEntrypoint(self, kQuickGet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001826 self,
1827 referrer);
1828 EXPECT_EQ(res, static_cast<int64_t>(res2));
1829 }
1830#else
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001831 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001832 LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
1833 // Force-print to std::cout so it's also outside the logcat.
1834 std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1835#endif
1836}
1837
1838static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
1839 // garbage is created during ClassLinker::Init
1840
1841 JNIEnv* env = Thread::Current()->GetJniEnv();
1842 jclass jc = env->FindClass("AllFields");
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001843 CHECK(jc != nullptr);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001844 jobject o = env->AllocObject(jc);
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001845 CHECK(o != nullptr);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001846
1847 ScopedObjectAccess soa(self);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001848 StackHandleScope<3> hs(self);
Mathieu Chartier0795f232016-09-27 18:43:30 -07001849 Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object>(o)));
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001850 Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001851 // Need a method as a referrer
Andreas Gampe542451c2016-07-26 09:02:02 -07001852 ArtMethod* m = c->GetDirectMethod(0, kRuntimePointerSize);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001853
1854 // Play with it...
1855
1856 // Static fields.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001857 for (ArtField& f : c->GetSFields()) {
1858 Primitive::Type type = f.GetTypeAsPrimitiveType();
Mathieu Chartierc7853442015-03-27 14:35:38 -07001859 if (test_type != type) {
1860 continue;
1861 }
1862 switch (type) {
1863 case Primitive::Type::kPrimBoolean:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001864 GetSetBooleanStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001865 break;
1866 case Primitive::Type::kPrimByte:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001867 GetSetByteStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001868 break;
1869 case Primitive::Type::kPrimChar:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001870 GetSetCharStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001871 break;
1872 case Primitive::Type::kPrimShort:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001873 GetSetShortStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001874 break;
1875 case Primitive::Type::kPrimInt:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001876 GetSet32Static(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001877 break;
1878 case Primitive::Type::kPrimLong:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001879 GetSet64Static(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001880 break;
1881 case Primitive::Type::kPrimNot:
1882 // Don't try array.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001883 if (f.GetTypeDescriptor()[0] != '[') {
1884 GetSetObjStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001885 }
1886 break;
1887 default:
1888 break; // Skip.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001889 }
1890 }
1891
1892 // Instance fields.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001893 for (ArtField& f : c->GetIFields()) {
1894 Primitive::Type type = f.GetTypeAsPrimitiveType();
Mathieu Chartierc7853442015-03-27 14:35:38 -07001895 if (test_type != type) {
1896 continue;
1897 }
1898 switch (type) {
1899 case Primitive::Type::kPrimBoolean:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001900 GetSetBooleanInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001901 break;
1902 case Primitive::Type::kPrimByte:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001903 GetSetByteInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001904 break;
1905 case Primitive::Type::kPrimChar:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001906 GetSetCharInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001907 break;
1908 case Primitive::Type::kPrimShort:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001909 GetSetShortInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001910 break;
1911 case Primitive::Type::kPrimInt:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001912 GetSet32Instance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001913 break;
1914 case Primitive::Type::kPrimLong:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001915 GetSet64Instance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001916 break;
1917 case Primitive::Type::kPrimNot:
1918 // Don't try array.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001919 if (f.GetTypeDescriptor()[0] != '[') {
1920 GetSetObjInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001921 }
1922 break;
1923 default:
1924 break; // Skip.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001925 }
1926 }
1927
1928 // TODO: Deallocate things.
1929}
1930
Fred Shih37f05ef2014-07-16 18:38:08 -07001931TEST_F(StubTest, Fields8) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001932 Thread* self = Thread::Current();
1933
1934 self->TransitionFromSuspendedToRunnable();
1935 LoadDex("AllFields");
1936 bool started = runtime_->Start();
1937 CHECK(started);
1938
1939 TestFields(self, this, Primitive::Type::kPrimBoolean);
1940 TestFields(self, this, Primitive::Type::kPrimByte);
1941}
1942
1943TEST_F(StubTest, Fields16) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001944 Thread* self = Thread::Current();
1945
1946 self->TransitionFromSuspendedToRunnable();
1947 LoadDex("AllFields");
1948 bool started = runtime_->Start();
1949 CHECK(started);
1950
1951 TestFields(self, this, Primitive::Type::kPrimChar);
1952 TestFields(self, this, Primitive::Type::kPrimShort);
1953}
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001954
1955TEST_F(StubTest, Fields32) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001956 Thread* self = Thread::Current();
1957
1958 self->TransitionFromSuspendedToRunnable();
1959 LoadDex("AllFields");
1960 bool started = runtime_->Start();
1961 CHECK(started);
1962
1963 TestFields(self, this, Primitive::Type::kPrimInt);
1964}
1965
1966TEST_F(StubTest, FieldsObj) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001967 Thread* self = Thread::Current();
1968
1969 self->TransitionFromSuspendedToRunnable();
1970 LoadDex("AllFields");
1971 bool started = runtime_->Start();
1972 CHECK(started);
1973
1974 TestFields(self, this, Primitive::Type::kPrimNot);
1975}
1976
1977TEST_F(StubTest, Fields64) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001978 Thread* self = Thread::Current();
1979
1980 self->TransitionFromSuspendedToRunnable();
1981 LoadDex("AllFields");
1982 bool started = runtime_->Start();
1983 CHECK(started);
1984
1985 TestFields(self, this, Primitive::Type::kPrimLong);
1986}
1987
Vladimir Marko9d07e3d2016-03-31 12:02:28 +01001988// Disabled, b/27991555 .
1989// FIXME: Hacking the entry point to point to art_quick_to_interpreter_bridge is broken.
1990// The bridge calls through to GetCalleeSaveMethodCaller() which looks up the pre-header
1991// and gets a bogus OatQuickMethodHeader* pointing into our assembly code just before
1992// the bridge and uses that to check for inlined frames, crashing in the process.
// Exercises interface dispatch through the stubs: first via the IMT conflict
// trampoline with a hand-built ImtConflictTable, then via the interface
// invocation trampoline with access check. Uses List.contains() dispatched on
// an ArrayList receiver.
TEST_F(StubTest, DISABLED_IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod = env->GetMethodID(
      arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get representation.
  ArtMethod* contains_amethod = soa.DecodeMethod(contains_jmethod);

  // Patch up ArrayList.contains.
  // If the method has no compiled code, point it at the interpreter bridge so
  // the trampolines have a valid entrypoint to branch to.
  if (contains_amethod->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
  }

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod = env->GetMethodID(
      list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get mirror representation.
  ArtMethod* inf_contains = soa.DecodeMethod(inf_contains_jmethod);

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Create instances.

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object>(jobj)));

  // Invocation tests.

  // 1. imt_conflict

  // Contains.

  // We construct the ImtConflictTable ourselves, as we cannot go into the runtime stub
  // that will create it: the runtime stub expects to be called by compiled code.
  // The table is placement-new'ed into linear-alloc memory, mapping the
  // interface method (List.contains) to its implementation (ArrayList.contains).
  LinearAlloc* linear_alloc = Runtime::Current()->GetLinearAlloc();
  ArtMethod* conflict_method = Runtime::Current()->CreateImtConflictMethod(linear_alloc);
  ImtConflictTable* empty_conflict_table =
      Runtime::Current()->GetClassLinker()->CreateImtConflictTable(/*count*/0u, linear_alloc);
  void* data = linear_alloc->Alloc(
      self,
      ImtConflictTable::ComputeSizeWithOneMoreEntry(empty_conflict_table, kRuntimePointerSize));
  ImtConflictTable* new_table = new (data) ImtConflictTable(
      empty_conflict_table, inf_contains, contains_amethod, kRuntimePointerSize);
  conflict_method->SetImtConflictTable(new_table, kRuntimePointerSize);

  // The list is still empty, so contains(obj) must report false.
  size_t result =
      Invoke3WithReferrerAndHidden(reinterpret_cast<size_t>(conflict_method),
                                   reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self,
                                   contains_amethod,
                                   static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << mirror::Object::PrettyTypeOf(self->GetException());

  // Contains.
  // After the add, the same dispatch must now report true.

  result =
      Invoke3WithReferrerAndHidden(reinterpret_cast<size_t>(conflict_method),
                                   reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self,
                                   contains_amethod,
                                   static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // 2. regular interface trampoline

  // contains(obj) through the access-check trampoline: obj was added above.
  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(obj.Get()),
                               StubTest::GetEntrypoint(self,
                                   kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // contains(array_list) — the list does not contain itself, so expect false.
  result = Invoke3WithReferrer(
      static_cast<size_t>(inf_contains->GetDexMethodIndex()),
      reinterpret_cast<size_t>(array_list.Get()), reinterpret_cast<size_t>(array_list.Get()),
      StubTest::GetEntrypoint(self, kQuickInvokeInterfaceTrampolineWithAccessCheck), self,
      contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
2131
Andreas Gampe6aac3552014-06-09 14:55:53 -07002132TEST_F(StubTest, StringIndexOf) {
Chris Larsencf283da2016-01-19 16:45:35 -08002133#if defined(__arm__) || defined(__aarch64__) || defined(__mips__)
Andreas Gampe6aac3552014-06-09 14:55:53 -07002134 Thread* self = Thread::Current();
2135 ScopedObjectAccess soa(self);
2136 // garbage is created during ClassLinker::Init
2137
2138 // Create some strings
2139 // Use array so we can index into it and use a matrix for expected results
2140 // Setup: The first half is standard. The second half uses a non-zero offset.
2141 // TODO: Shared backing arrays.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002142 const char* c_str[] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
2143 static constexpr size_t kStringCount = arraysize(c_str);
2144 const char c_char[] = { 'a', 'b', 'c', 'd', 'e' };
2145 static constexpr size_t kCharCount = arraysize(c_char);
Andreas Gampe6aac3552014-06-09 14:55:53 -07002146
2147 StackHandleScope<kStringCount> hs(self);
2148 Handle<mirror::String> s[kStringCount];
2149
2150 for (size_t i = 0; i < kStringCount; ++i) {
2151 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
2152 }
2153
2154 // Matrix of expectations. First component is first parameter. Note we only check against the
2155 // sign, not the value. As we are testing random offsets, we need to compute this and need to
2156 // rely on String::CompareTo being correct.
2157 static constexpr size_t kMaxLen = 9;
2158 DCHECK_LE(strlen(c_str[kStringCount-1]), kMaxLen) << "Please fix the indexof test.";
2159
2160 // Last dimension: start, offset by 1.
2161 int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
2162 for (size_t x = 0; x < kStringCount; ++x) {
2163 for (size_t y = 0; y < kCharCount; ++y) {
2164 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2165 expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
2166 }
2167 }
2168 }
2169
2170 // Play with it...
2171
2172 for (size_t x = 0; x < kStringCount; ++x) {
2173 for (size_t y = 0; y < kCharCount; ++y) {
2174 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2175 int32_t start = static_cast<int32_t>(z) - 1;
2176
2177 // Test string_compareto x y
2178 size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
Andreas Gampe29b38412014-08-13 00:15:43 -07002179 StubTest::GetEntrypoint(self, kQuickIndexOf), self);
Andreas Gampe6aac3552014-06-09 14:55:53 -07002180
2181 EXPECT_FALSE(self->IsExceptionPending());
2182
2183 // The result is a 32b signed integer
2184 union {
2185 size_t r;
2186 int32_t i;
2187 } conv;
2188 conv.r = result;
2189
2190 EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
2191 c_char[y] << " @ " << start;
2192 }
2193 }
2194 }
2195
2196 // TODO: Deallocate things.
2197
2198 // Tests done.
2199#else
2200 LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
2201 // Force-print to std::cout so it's also outside the logcat.
2202 std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe51f76352014-05-21 08:28:48 -07002203#endif
2204}
2205
Roland Levillain02b75802016-07-13 11:54:35 +01002206// TODO: Exercise the ReadBarrierMarkRegX entry points.
2207
Man Cao1aee9002015-07-14 22:31:42 -07002208TEST_F(StubTest, ReadBarrier) {
2209#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
2210 defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
2211 Thread* self = Thread::Current();
2212
2213 const uintptr_t readBarrierSlow = StubTest::GetEntrypoint(self, kQuickReadBarrierSlow);
2214
2215 // Create an object
2216 ScopedObjectAccess soa(self);
2217 // garbage is created during ClassLinker::Init
2218
2219 StackHandleScope<2> hs(soa.Self());
2220 Handle<mirror::Class> c(
2221 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
2222
2223 // Build an object instance
2224 Handle<mirror::Object> obj(hs.NewHandle(c->AllocObject(soa.Self())));
2225
2226 EXPECT_FALSE(self->IsExceptionPending());
2227
2228 size_t result = Invoke3(0U, reinterpret_cast<size_t>(obj.Get()),
2229 mirror::Object::ClassOffset().SizeValue(), readBarrierSlow, self);
2230
2231 EXPECT_FALSE(self->IsExceptionPending());
2232 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
2233 mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
2234 EXPECT_EQ(klass, obj->GetClass());
2235
2236 // Tests done.
2237#else
2238 LOG(INFO) << "Skipping read_barrier_slow";
2239 // Force-print to std::cout so it's also outside the logcat.
2240 std::cout << "Skipping read_barrier_slow" << std::endl;
2241#endif
2242}
2243
Roland Levillain0d5a2812015-11-13 10:07:31 +00002244TEST_F(StubTest, ReadBarrierForRoot) {
2245#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
2246 defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
2247 Thread* self = Thread::Current();
2248
2249 const uintptr_t readBarrierForRootSlow =
2250 StubTest::GetEntrypoint(self, kQuickReadBarrierForRootSlow);
2251
2252 // Create an object
2253 ScopedObjectAccess soa(self);
2254 // garbage is created during ClassLinker::Init
2255
2256 StackHandleScope<1> hs(soa.Self());
2257
2258 Handle<mirror::String> obj(
2259 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
2260
2261 EXPECT_FALSE(self->IsExceptionPending());
2262
2263 GcRoot<mirror::Class>& root = mirror::String::java_lang_String_;
2264 size_t result = Invoke3(reinterpret_cast<size_t>(&root), 0U, 0U, readBarrierForRootSlow, self);
2265
2266 EXPECT_FALSE(self->IsExceptionPending());
2267 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
2268 mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
2269 EXPECT_EQ(klass, obj->GetClass());
2270
2271 // Tests done.
2272#else
2273 LOG(INFO) << "Skipping read_barrier_for_root_slow";
2274 // Force-print to std::cout so it's also outside the logcat.
2275 std::cout << "Skipping read_barrier_for_root_slow" << std::endl;
2276#endif
2277}
2278
Andreas Gampe525cde22014-04-22 15:44:50 -07002279} // namespace art