blob: c151f00289d99aa8d44e9a5a668b821b671aebd5 [file] [log] [blame]
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
Ian Rogerse63db272014-07-15 15:36:11 -070017#include <cstdio>
18
Mathieu Chartierc7853442015-03-27 14:35:38 -070019#include "art_field-inl.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070020#include "art_method-inl.h"
Andreas Gampe542451c2016-07-26 09:02:02 -070021#include "base/enums.h"
Vladimir Marko3481ba22015-04-13 12:22:36 +010022#include "class_linker-inl.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070023#include "common_runtime_test.h"
Andreas Gampe29b38412014-08-13 00:15:43 -070024#include "entrypoints/quick/quick_entrypoints_enum.h"
Andreas Gampe75a7db62016-09-26 12:04:26 -070025#include "imt_conflict_table.h"
Nicolas Geoffray1004faa2016-03-23 14:28:30 +000026#include "linear_alloc.h"
Andreas Gampe51f76352014-05-21 08:28:48 -070027#include "mirror/class-inl.h"
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -070028#include "mirror/string-inl.h"
Mathieu Chartier0795f232016-09-27 18:43:30 -070029#include "scoped_thread_state_change-inl.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070030
31namespace art {
32
33
// Test fixture for invoking quick-code entrypoint stubs directly.
//
// The Invoke3* helpers hand-roll a call into a stub via per-architecture
// inline assembly: they spill/restore registers, place the three word-sized
// arguments (plus Thread* self and an extra "hidden" argument that some
// trampolines expect) where the quick calling convention wants them, push a
// managed-stack fragment so stack walks through the stub work, and return the
// stub's word-sized result.
class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      // Ensure every callee-save method kind exists; stubs may transition into
      // the runtime and need these frames for exception delivery.
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(), type);
        }
      }
    }
  }

  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    // Force the interpreter; these tests exercise the stubs themselves.
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  // Invoke a stub taking (arg0, arg1, arg2) with no referrer and no hidden arg.
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             ArtMethod* referrer) {
    return Invoke3WithReferrerAndHidden(arg0, arg1, arg2, code, self, referrer, 0);
  }

  // Core trampoline: calls `code` with the quick calling convention, pushing
  // `referrer` as the fake caller frame and passing `hidden` in the
  // architecture's hidden-argument register. Returns the stub's result.
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
#define PUSH(reg) "push " # reg "\n\t .cfi_adjust_cfa_offset 4\n\t"
#define POP(reg) "pop " # reg "\n\t .cfi_adjust_cfa_offset -4\n\t"
    __asm__ __volatile__(
        "movd %[hidden], %%xmm7\n\t"  // This is a memory op, so do this early. If it is off of
                                      // esp, then we won't be able to access it after spilling.

        // Spill 6 registers.
        PUSH(%%ebx)
        PUSH(%%ecx)
        PUSH(%%edx)
        PUSH(%%esi)
        PUSH(%%edi)
        PUSH(%%ebp)

        // Store the inputs to the stack, but keep the referrer up top, less work.
        PUSH(%[referrer])           // Align stack.
        PUSH(%[referrer])           // Store referrer

        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[code])
        // Now read them back into the required registers.
        POP(%%edi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%eax)
        // Call is prepared now.

        "call *%%edi\n\t"           // Call the stub
        "addl $8, %%esp\n\t"        // Pop referrer and padding.
        ".cfi_adjust_cfa_offset -8\n\t"

        // Restore 6 registers.
        POP(%%ebp)
        POP(%%edi)
        POP(%%esi)
        POP(%%edx)
        POP(%%ecx)
        POP(%%ebx)

        : "=a" (result)
          // Use the result from eax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer]"r"(referrer), [hidden]"m"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory", "xmm7");  // clobber.
#undef PUSH
#undef POP
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"   // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop null and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "r0", "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #80\n\t"
        ".cfi_adjust_cfa_offset 80\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"
        // To be extra defensive, store x20. We do this because some of the stubs might make a
        // transition into the runtime via the blr instruction below and *not* save x20.
        "str x20, [sp, #64]\n\t"
        // 8 byte buffer

        "sub sp, sp, #16\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"    // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x19, x17, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"              // Call the stub
        "mov x8, x0\n\t"          // Store result
        "add sp, sp, #16\n\t"     // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"          // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"    // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "ldr x20, [sp, #64]\n\t"
        "add sp, sp, #80\n\t"     // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -80\n\t"

        "str x9, %[fpr_result]\n\t"  // Store the FPR comparison result
        "mov %[result], x8\n\t"      // Store the call result

        "b 3f\n\t"                // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"                // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
          // Leave one register unclobbered, which is needed for compiling with
          // -fstack-protector-strong. According to AAPCS64 registers x9-x15 are caller-saved,
          // which means we should unclobber one of the callee-saved registers that are unused.
          // Here we use x20.
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");
#elif defined(__mips__) && !defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a3 and t0-t7 which we say we don't clobber. May contain args.
        "addiu $sp, $sp, -64\n\t"
        "sw $a0, 0($sp)\n\t"
        "sw $a1, 4($sp)\n\t"
        "sw $a2, 8($sp)\n\t"
        "sw $a3, 12($sp)\n\t"
        "sw $t0, 16($sp)\n\t"
        "sw $t1, 20($sp)\n\t"
        "sw $t2, 24($sp)\n\t"
        "sw $t3, 28($sp)\n\t"
        "sw $t4, 32($sp)\n\t"
        "sw $t5, 36($sp)\n\t"
        "sw $t6, 40($sp)\n\t"
        "sw $t7, 44($sp)\n\t"
        // Spill gp register since it is caller save.
        "sw $gp, 52($sp)\n\t"

        "addiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sw %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "addiu $sp, $sp, -24\n\t"
        "sw %[arg0], 0($sp)\n\t"
        "sw %[arg1], 4($sp)\n\t"
        "sw %[arg2], 8($sp)\n\t"
        "sw %[code], 12($sp)\n\t"
        "sw %[self], 16($sp)\n\t"
        "sw %[hidden], 20($sp)\n\t"

        // Load call params into the right registers.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $t9, 12($sp)\n\t"
        "lw $s1, 16($sp)\n\t"
        "lw $t0, 20($sp)\n\t"
        "addiu $sp, $sp, 24\n\t"

        "jalr $t9\n\t"             // Call the stub.
        "nop\n\t"                  // Branch delay slot.
        "addiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "lw $a0, 0($sp)\n\t"
        "lw $a1, 4($sp)\n\t"
        "lw $a2, 8($sp)\n\t"
        "lw $a3, 12($sp)\n\t"
        "lw $t0, 16($sp)\n\t"
        "lw $t1, 20($sp)\n\t"
        "lw $t2, 24($sp)\n\t"
        "lw $t3, 28($sp)\n\t"
        "lw $t4, 32($sp)\n\t"
        "lw $t5, 36($sp)\n\t"
        "lw $t6, 40($sp)\n\t"
        "lw $t7, 44($sp)\n\t"
        // Restore gp.
        "lw $gp, 52($sp)\n\t"
        "addiu $sp, $sp, 64\n\t"   // Free stack space, now sp as on entry.

        "move %[result], $v0\n\t"  // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "at", "v0", "v1", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "t8", "t9", "k0", "k1",
          "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__mips__) && defined(__LP64__)
    __asm__ __volatile__ (
        // Spill a0-a7 which we say we don't clobber. May contain args.
        "daddiu $sp, $sp, -64\n\t"
        "sd $a0, 0($sp)\n\t"
        "sd $a1, 8($sp)\n\t"
        "sd $a2, 16($sp)\n\t"
        "sd $a3, 24($sp)\n\t"
        "sd $a4, 32($sp)\n\t"
        "sd $a5, 40($sp)\n\t"
        "sd $a6, 48($sp)\n\t"
        "sd $a7, 56($sp)\n\t"

        "daddiu $sp, $sp, -16\n\t"  // Reserve stack space, 16B aligned.
        "sd %[referrer], 0($sp)\n\t"

        // Push everything on the stack, so we don't rely on the order.
        "daddiu $sp, $sp, -48\n\t"
        "sd %[arg0], 0($sp)\n\t"
        "sd %[arg1], 8($sp)\n\t"
        "sd %[arg2], 16($sp)\n\t"
        "sd %[code], 24($sp)\n\t"
        "sd %[self], 32($sp)\n\t"
        "sd %[hidden], 40($sp)\n\t"

        // Load call params into the right registers.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $t9, 24($sp)\n\t"
        "ld $s1, 32($sp)\n\t"
        "ld $t0, 40($sp)\n\t"
        "daddiu $sp, $sp, 48\n\t"

        "jalr $t9\n\t"              // Call the stub.
        "nop\n\t"                   // Branch delay slot.
        "daddiu $sp, $sp, 16\n\t"   // Drop the quick "frame".

        // Restore stuff not named clobbered.
        "ld $a0, 0($sp)\n\t"
        "ld $a1, 8($sp)\n\t"
        "ld $a2, 16($sp)\n\t"
        "ld $a3, 24($sp)\n\t"
        "ld $a4, 32($sp)\n\t"
        "ld $a5, 40($sp)\n\t"
        "ld $a6, 48($sp)\n\t"
        "ld $a7, 56($sp)\n\t"
        "daddiu $sp, $sp, 64\n\t"

        "move %[result], $v0\n\t"   // Store the call result.
        : [result] "=r" (result)
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        // Instead aliases t0-t3, register names $12-$15 has been used in the clobber list because
        // t0-t3 are ambiguous.
        : "at", "v0", "v1", "$12", "$13", "$14", "$15", "s0", "s1", "s2", "s3", "s4", "s5", "s6",
          "s7", "t8", "t9", "k0", "k1", "fp", "ra",
          "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
          "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
          "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__)
#define PUSH(reg) "pushq " # reg "\n\t .cfi_adjust_cfa_offset 8\n\t"
#define POP(reg) "popq " # reg "\n\t .cfi_adjust_cfa_offset -8\n\t"
    // Note: Uses the native convention. We do a callee-save regimen by manually spilling and
    // restoring almost all registers.
    // TODO: Set the thread?
    __asm__ __volatile__(
        // Spill almost everything (except rax, rsp). 14 registers.
        PUSH(%%rbx)
        PUSH(%%rcx)
        PUSH(%%rdx)
        PUSH(%%rsi)
        PUSH(%%rdi)
        PUSH(%%rbp)
        PUSH(%%r8)
        PUSH(%%r9)
        PUSH(%%r10)
        PUSH(%%r11)
        PUSH(%%r12)
        PUSH(%%r13)
        PUSH(%%r14)
        PUSH(%%r15)

        PUSH(%[referrer])           // Push referrer & 16B alignment padding
        PUSH(%[referrer])

        // Now juggle the input registers.
        PUSH(%[arg0])
        PUSH(%[arg1])
        PUSH(%[arg2])
        PUSH(%[hidden])
        PUSH(%[code])
        POP(%%r8)
        POP(%%rax)
        POP(%%rdx)
        POP(%%rsi)
        POP(%%rdi)

        "call *%%r8\n\t"            // Call the stub
        "addq $16, %%rsp\n\t"       // Pop null and padding
        ".cfi_adjust_cfa_offset -16\n\t"

        POP(%%r15)
        POP(%%r14)
        POP(%%r13)
        POP(%%r12)
        POP(%%r11)
        POP(%%r10)
        POP(%%r9)
        POP(%%r8)
        POP(%%rbp)
        POP(%%rdi)
        POP(%%rsi)
        POP(%%rdx)
        POP(%%rcx)
        POP(%%rbx)

        : "=a" (result)
          // Use the result from rax
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into some other
          // register. We can't use "b" (rbx), as ASAN uses this for the frame pointer.
        : "memory");  // We spill and restore (almost) all registers, so only mention memory here.
#undef PUSH
#undef POP
#else
    UNUSED(arg0, arg1, arg2, code, referrer, hidden);
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    // On aarch64 the assembly above sets fpr_result to non-zero if the stub
    // corrupted any of the callee-saved FP registers d8-d15.
    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Reads the raw entrypoint pointer for `entrypoint` out of `self`'s
  // thread-local entrypoint table.
  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
    int32_t offset;
    offset = GetThreadOffset<kRuntimePointerSize>(entrypoint).Int32Value();
    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
  }

 protected:
  size_t fp_result;  // Result of the FPR callee-save check from the last Invoke3*.
};
541
542
Andreas Gampe525cde22014-04-22 15:44:50 -0700543TEST_F(StubTest, Memcpy) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200544#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__)) || defined(__mips__)
Andreas Gampe525cde22014-04-22 15:44:50 -0700545 Thread* self = Thread::Current();
546
547 uint32_t orig[20];
548 uint32_t trg[20];
549 for (size_t i = 0; i < 20; ++i) {
550 orig[i] = i;
551 trg[i] = 0;
552 }
553
554 Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
Andreas Gampe29b38412014-08-13 00:15:43 -0700555 10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700556
557 EXPECT_EQ(orig[0], trg[0]);
558
559 for (size_t i = 1; i < 4; ++i) {
560 EXPECT_NE(orig[i], trg[i]);
561 }
562
563 for (size_t i = 4; i < 14; ++i) {
564 EXPECT_EQ(orig[i], trg[i]);
565 }
566
567 for (size_t i = 14; i < 20; ++i) {
568 EXPECT_NE(orig[i], trg[i]);
569 }
570
571 // TODO: Test overlapping?
572
573#else
574 LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
575 // Force-print to std::cout so it's also outside the logcat.
576 std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
577#endif
578}
579
// Exercises the quick lock-object stub: verifies the thin-lock fast path
// (recursive acquisition increments the thin-lock count) and the inflation to
// a fat lock once the lock word already holds an identity hash code.
TEST_F(StubTest, LockObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  static constexpr size_t kThinLockLoops = 100;

  Thread* self = Thread::Current();

  const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::String> obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
  LockWord lock = obj->GetLockWord(false);
  LockWord::LockState old_state = lock.GetState();
  EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);

  // First acquisition: unlocked -> thin-locked.
  Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

  LockWord lock_after = obj->GetLockWord(false);
  LockWord::LockState new_state = lock_after.GetState();
  EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
  EXPECT_EQ(lock_after.ThinLockCount(), 0U);  // Thin lock starts count at zero

  // Recursive acquisitions: each one should bump the thin-lock count by one.
  for (size_t i = 1; i < kThinLockLoops; ++i) {
    Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);

    // Check we're at lock count i

    LockWord l_inc = obj->GetLockWord(false);
    LockWord::LockState l_inc_state = l_inc.GetState();
    EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
    EXPECT_EQ(l_inc.ThinLockCount(), i);
  }

  // Force a fat lock by running identity hashcode to fill up lock word.
  Handle<mirror::String> obj2(hs.NewHandle(
      mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  obj2->IdentityHashCode();

  Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);

  // The hash code already occupied the lock word, so locking must inflate to
  // a fat lock backed by a Monitor.
  LockWord lock_after2 = obj2->GetLockWord(false);
  LockWord::LockState new_state2 = lock_after2.GetState();
  EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
  EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));

  // Test done.
#else
  LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
638
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700639
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700640class RandGen {
641 public:
642 explicit RandGen(uint32_t seed) : val_(seed) {}
643
644 uint32_t next() {
645 val_ = val_ * 48271 % 2147483647 + 13;
646 return val_;
647 }
648
649 uint32_t val_;
650};
651
652
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700653// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
654static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200655#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
656 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700657 static constexpr size_t kThinLockLoops = 100;
658
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700659 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700660
661 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
662 const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700663 // Create an object
664 ScopedObjectAccess soa(self);
665 // garbage is created during ClassLinker::Init
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700666 static constexpr size_t kNumberOfLocks = 10; // Number of objects = lock
667 StackHandleScope<kNumberOfLocks + 1> hs(self);
668 Handle<mirror::String> obj(
669 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700670 LockWord lock = obj->GetLockWord(false);
671 LockWord::LockState old_state = lock.GetState();
672 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
673
Andreas Gampe29b38412014-08-13 00:15:43 -0700674 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700675 // This should be an illegal monitor state.
676 EXPECT_TRUE(self->IsExceptionPending());
677 self->ClearException();
678
679 LockWord lock_after = obj->GetLockWord(false);
680 LockWord::LockState new_state = lock_after.GetState();
681 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);
Andreas Gampe525cde22014-04-22 15:44:50 -0700682
Andreas Gampe29b38412014-08-13 00:15:43 -0700683 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700684
685 LockWord lock_after2 = obj->GetLockWord(false);
686 LockWord::LockState new_state2 = lock_after2.GetState();
687 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);
688
Andreas Gampe29b38412014-08-13 00:15:43 -0700689 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700690
691 LockWord lock_after3 = obj->GetLockWord(false);
692 LockWord::LockState new_state3 = lock_after3.GetState();
693 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);
694
695 // Stress test:
696 // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
697 // each step.
698
699 RandGen r(0x1234);
700
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700701 constexpr size_t kIterations = 10000; // Number of iterations
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700702 constexpr size_t kMoveToFat = 1000; // Chance of 1:kMoveFat to make a lock fat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700703
704 size_t counts[kNumberOfLocks];
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700705 bool fat[kNumberOfLocks]; // Whether a lock should be thin or fat.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700706 Handle<mirror::String> objects[kNumberOfLocks];
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700707
708 // Initialize = allocate.
709 for (size_t i = 0; i < kNumberOfLocks; ++i) {
710 counts[i] = 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700711 fat[i] = false;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700712 objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700713 }
714
715 for (size_t i = 0; i < kIterations; ++i) {
716 // Select which lock to update.
717 size_t index = r.next() % kNumberOfLocks;
718
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700719 // Make lock fat?
720 if (!fat[index] && (r.next() % kMoveToFat == 0)) {
721 fat[index] = true;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700722 objects[index]->IdentityHashCode();
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700723
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700724 LockWord lock_iter = objects[index]->GetLockWord(false);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700725 LockWord::LockState iter_state = lock_iter.GetState();
726 if (counts[index] == 0) {
727 EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
728 } else {
729 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
730 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700731 } else {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800732 bool take_lock; // Whether to lock or unlock in this step.
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700733 if (counts[index] == 0) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800734 take_lock = true;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700735 } else if (counts[index] == kThinLockLoops) {
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800736 take_lock = false;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700737 } else {
738 // Randomly.
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800739 take_lock = r.next() % 2 == 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700740 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700741
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800742 if (take_lock) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700743 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
744 self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700745 counts[index]++;
746 } else {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700747 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700748 art_quick_unlock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700749 counts[index]--;
750 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700751
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700752 EXPECT_FALSE(self->IsExceptionPending());
753
754 // Check the new state.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700755 LockWord lock_iter = objects[index]->GetLockWord(true);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700756 LockWord::LockState iter_state = lock_iter.GetState();
757 if (fat[index]) {
758 // Abuse MonitorInfo.
759 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700760 MonitorInfo info(objects[index].Get());
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700761 EXPECT_EQ(counts[index], info.entry_count_) << index;
762 } else {
763 if (counts[index] > 0) {
764 EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
765 EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
766 } else {
767 EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
768 }
769 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700770 }
771 }
772
773 // Unlock the remaining count times and then check it's unlocked. Then deallocate.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700774 // Go reverse order to correctly handle Handles.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700775 for (size_t i = 0; i < kNumberOfLocks; ++i) {
776 size_t index = kNumberOfLocks - 1 - i;
777 size_t count = counts[index];
778 while (count > 0) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700779 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
780 self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700781 count--;
782 }
783
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700784 LockWord lock_after4 = objects[index]->GetLockWord(false);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700785 LockWord::LockState new_state4 = lock_after4.GetState();
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700786 EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
787 || LockWord::LockState::kFatLocked == new_state4);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700788 }
789
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700790 // Test done.
Andreas Gampe525cde22014-04-22 15:44:50 -0700791#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -0800792 UNUSED(test);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700793 LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
Andreas Gampe525cde22014-04-22 15:44:50 -0700794 // Force-print to std::cout so it's also outside the logcat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700795 std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe525cde22014-04-22 15:44:50 -0700796#endif
797}
798
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700799TEST_F(StubTest, UnlockObject) {
Andreas Gampe369810a2015-01-14 19:53:31 -0800800 // This will lead to monitor error messages in the log.
801 ScopedLogSeverity sls(LogSeverity::FATAL);
802
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700803 TestUnlockObject(this);
804}
Andreas Gampe525cde22014-04-22 15:44:50 -0700805
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200806#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
807 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700808extern "C" void art_quick_check_cast(void);
809#endif
810
811TEST_F(StubTest, CheckCast) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200812#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
813 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700814 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700815
816 const uintptr_t art_quick_check_cast = StubTest::GetEntrypoint(self, kQuickCheckCast);
817
Andreas Gampe525cde22014-04-22 15:44:50 -0700818 // Find some classes.
819 ScopedObjectAccess soa(self);
820 // garbage is created during ClassLinker::Init
821
Mathieu Chartier2ecfd272016-11-01 10:10:05 -0700822 StackHandleScope<4> hs(soa.Self());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700823 Handle<mirror::Class> c(
824 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
825 Handle<mirror::Class> c2(
826 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));
Mathieu Chartier2ecfd272016-11-01 10:10:05 -0700827 Handle<mirror::Class> list(
828 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/util/List;")));
829 Handle<mirror::Class> array_list(
830 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/util/ArrayList;")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700831
832 EXPECT_FALSE(self->IsExceptionPending());
833
Mathieu Chartier2ecfd272016-11-01 10:10:05 -0700834 Invoke3(reinterpret_cast<size_t>(c.Get()),
835 reinterpret_cast<size_t>(c.Get()),
836 0U,
837 art_quick_check_cast,
838 self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700839 EXPECT_FALSE(self->IsExceptionPending());
840
Mathieu Chartier2ecfd272016-11-01 10:10:05 -0700841 Invoke3(reinterpret_cast<size_t>(c2.Get()),
842 reinterpret_cast<size_t>(c2.Get()),
843 0U,
844 art_quick_check_cast,
845 self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700846 EXPECT_FALSE(self->IsExceptionPending());
847
Mathieu Chartier2ecfd272016-11-01 10:10:05 -0700848 Invoke3(reinterpret_cast<size_t>(c.Get()),
849 reinterpret_cast<size_t>(c2.Get()),
850 0U,
851 art_quick_check_cast,
852 self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700853 EXPECT_FALSE(self->IsExceptionPending());
854
Mathieu Chartier2ecfd272016-11-01 10:10:05 -0700855 Invoke3(reinterpret_cast<size_t>(list.Get()),
856 reinterpret_cast<size_t>(array_list.Get()),
857 0U,
858 art_quick_check_cast,
859 self);
860 EXPECT_FALSE(self->IsExceptionPending());
861
862 Invoke3(reinterpret_cast<size_t>(list.Get()),
863 reinterpret_cast<size_t>(c2.Get()),
864 0U,
865 art_quick_check_cast,
866 self);
867 EXPECT_TRUE(self->IsExceptionPending());
868 self->ClearException();
869
Andreas Gampe525cde22014-04-22 15:44:50 -0700870 // TODO: Make the following work. But that would require correct managed frames.
Mathieu Chartier2ecfd272016-11-01 10:10:05 -0700871 Invoke3(reinterpret_cast<size_t>(c2.Get()),
872 reinterpret_cast<size_t>(c.Get()),
873 0U,
874 art_quick_check_cast,
875 self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700876 EXPECT_TRUE(self->IsExceptionPending());
877 self->ClearException();
878
879#else
880 LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
881 // Force-print to std::cout so it's also outside the logcat.
882 std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
883#endif
884}
885
886
Andreas Gampe525cde22014-04-22 15:44:50 -0700887TEST_F(StubTest, APutObj) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +0200888#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
889 (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700890 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700891
892 // Do not check non-checked ones, we'd need handlers and stuff...
893 const uintptr_t art_quick_aput_obj_with_null_and_bound_check =
894 StubTest::GetEntrypoint(self, kQuickAputObjectWithNullAndBoundCheck);
895
Andreas Gampe525cde22014-04-22 15:44:50 -0700896 // Create an object
897 ScopedObjectAccess soa(self);
898 // garbage is created during ClassLinker::Init
899
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700900 StackHandleScope<5> hs(soa.Self());
901 Handle<mirror::Class> c(
902 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
903 Handle<mirror::Class> ca(
904 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700905
906 // Build a string array of size 1
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700907 Handle<mirror::ObjectArray<mirror::Object>> array(
908 hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));
Andreas Gampe525cde22014-04-22 15:44:50 -0700909
910 // Build a string -> should be assignable
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700911 Handle<mirror::String> str_obj(
912 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700913
914 // Build a generic object -> should fail assigning
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700915 Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));
Andreas Gampe525cde22014-04-22 15:44:50 -0700916
917 // Play with it...
918
919 // 1) Success cases
Andreas Gampef4e910b2014-04-29 16:55:52 -0700920 // 1.1) Assign str_obj to array[0..3]
Andreas Gampe525cde22014-04-22 15:44:50 -0700921
922 EXPECT_FALSE(self->IsExceptionPending());
923
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700924 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700925 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700926
927 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700928 EXPECT_EQ(str_obj.Get(), array->Get(0));
Andreas Gampe525cde22014-04-22 15:44:50 -0700929
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700930 Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700931 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700932
933 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700934 EXPECT_EQ(str_obj.Get(), array->Get(1));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700935
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700936 Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700937 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700938
939 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700940 EXPECT_EQ(str_obj.Get(), array->Get(2));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700941
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700942 Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700943 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700944
945 EXPECT_FALSE(self->IsExceptionPending());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700946 EXPECT_EQ(str_obj.Get(), array->Get(3));
Andreas Gampef4e910b2014-04-29 16:55:52 -0700947
948 // 1.2) Assign null to array[0..3]
Andreas Gampe525cde22014-04-22 15:44:50 -0700949
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700950 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700951 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700952
953 EXPECT_FALSE(self->IsExceptionPending());
Andreas Gampef4e910b2014-04-29 16:55:52 -0700954 EXPECT_EQ(nullptr, array->Get(0));
955
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700956 Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700957 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700958
959 EXPECT_FALSE(self->IsExceptionPending());
960 EXPECT_EQ(nullptr, array->Get(1));
961
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700962 Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700963 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700964
965 EXPECT_FALSE(self->IsExceptionPending());
966 EXPECT_EQ(nullptr, array->Get(2));
967
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700968 Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
Andreas Gampe29b38412014-08-13 00:15:43 -0700969 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampef4e910b2014-04-29 16:55:52 -0700970
971 EXPECT_FALSE(self->IsExceptionPending());
972 EXPECT_EQ(nullptr, array->Get(3));
Andreas Gampe525cde22014-04-22 15:44:50 -0700973
974 // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.
975
976 // 2) Failure cases (str into str[])
977 // 2.1) Array = null
978 // TODO: Throwing NPE needs actual DEX code
979
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700980// Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe525cde22014-04-22 15:44:50 -0700981// reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
982//
983// EXPECT_TRUE(self->IsExceptionPending());
984// self->ClearException();
985
986 // 2.2) Index < 0
987
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700988 Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
989 reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700990 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700991
992 EXPECT_TRUE(self->IsExceptionPending());
993 self->ClearException();
994
995 // 2.3) Index > 0
996
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700997 Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -0700998 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700999
1000 EXPECT_TRUE(self->IsExceptionPending());
1001 self->ClearException();
1002
1003 // 3) Failure cases (obj into str[])
1004
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001005 Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
Andreas Gampe29b38412014-08-13 00:15:43 -07001006 art_quick_aput_obj_with_null_and_bound_check, self);
Andreas Gampe525cde22014-04-22 15:44:50 -07001007
1008 EXPECT_TRUE(self->IsExceptionPending());
1009 self->ClearException();
1010
1011 // Tests done.
1012#else
1013 LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
1014 // Force-print to std::cout so it's also outside the logcat.
1015 std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
1016#endif
1017}
1018
// Exercises the three object-allocation entrypoints (kQuickAllocObject,
// kQuickAllocObjectResolved, kQuickAllocObjectInitialized) on java.lang.Object,
// then drives the heap towards exhaustion to check the OOM failure path.
// The OOM fill loop below is sensitive to allocation order and handle-scope
// layout; do not reorder.
TEST_F(StubTest, AllocObject) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Enter a state where we are allowed to touch managed objects.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // kQuickAllocObject resolves the type from (type_idx, referrer method).
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            // arbitrary
                            reinterpret_cast<size_t>(c->GetVirtualMethod(0, kRuntimePointerSize)),
                            0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObject),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // kQuickAllocObjectResolved takes the Class* directly.
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // kQuickAllocObjectInitialized also takes the Class* directly.
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory.
  {
    // NOTE(review): presumably fixes the heap's allocation budget so the fill
    // loop below reliably exhausts it — confirm against Heap::SetIdealFootprint.
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    static const size_t kMaxHandles = 1000000;
    // Heap-allocate the scope: a million-slot StackHandleScope is too big for the stack.
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K
    size_t length = 128 * KB / 4;
    while (length > 10) {
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h.Get() == nullptr) {
        // Allocation failed: shrink and retry until even tiny arrays fail.
        self->ClearException();

        // Try a smaller length
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        // Keep the array alive via its handle so the GC cannot reclaim it.
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h.Get() != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    // With the heap full, the initialized-alloc stub must fail with a pending
    // exception and a null result.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1143
// Exercises the array-allocation entrypoints: the resolved path
// (kQuickAllocArrayResolved) on Object[], plus an absurdly large request that
// must fail with OOM. The type_idx-based path (kQuickAllocArray) is compiled
// but disabled below.
TEST_F(StubTest, AllocObjectArray) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  // This will lead to OOM error messages in the log.
  ScopedLogSeverity sls(LogSeverity::FATAL);

  Thread* self = Thread::Current();
  // Enter a state where we are allowed to touch managed objects.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(self);
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

  // Needed to have a linked method.
  Handle<mirror::Class> c_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());

  // For some reason this does not work, as the type_idx is artificial and outside what the
  // resolved types of c_obj allow...

  // Deliberately disabled (kept compiling): see the comment above.
  if ((false)) {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(
        static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
        10U,
        // arbitrary
        reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0, kRuntimePointerSize)),
        StubTest::GetEntrypoint(self, kQuickAllocArray),
        self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    EXPECT_EQ(obj->GetLength(), 10);
  }

  {
    // Allocate an Object[10] via the resolved stub and sanity-check the result.
    // We can use null in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 10U,
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << mirror::Object::PrettyTypeOf(self->GetException());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory.
  {
    // A GB-element request must fail: expect a pending exception and null.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()),
                            GB,  // that should fail...
                            reinterpret_cast<size_t>(nullptr),
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1230
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001231
// Exercises the art_quick_string_compareto stub: compares every pair from a
// fixed set of strings and checks the stub's result sign against the reference
// implementation mirror::String::CompareTo.
TEST_F(StubTest, StringCompareTo) {
  TEST_DISABLED_FOR_STRING_COMPRESSION();
  // There is no StringCompareTo runtime entrypoint for __arm__ or __aarch64__.
#if defined(__i386__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);

  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results.
  // The long entries straddle the length cutoff mentioned below; the last two
  // are distinct objects with near-identical content to defeat object-equality
  // shortcuts.
  // TODO: Shared backing arrays.
  const char* c[] = { "", "", "a", "aa", "ab",
      "aacaacaacaacaacaac",  // This one's under the default limit to go to __memcmp16.
      "aacaacaacaacaacaacaacaacaacaacaacaac",  // This one's over.
      "aacaacaacaacaacaacaacaacaacaacaacaaca" };  // As is this one. We need a separate one to
                                                  // defeat object-equal optimizations.
  static constexpr size_t kStringCount = arraysize(c);

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
  }

  // TODO: wide characters

  // Matrix of expectations. First component is first parameter. Note we only check against the
  // sign, not the value: the reference results come from String::CompareTo, whose
  // magnitude the stub is not required to reproduce.
  int32_t expected[kStringCount][kStringCount];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      expected[x][y] = s[x]->CompareTo(s[y].Get());
    }
  }

  // Compare every ordered pair through the stub.

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      // Test string_compareto x y
      size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
                              reinterpret_cast<size_t>(s[y].Get()), 0U,
                              art_quick_string_compareto, self);

      EXPECT_FALSE(self->IsExceptionPending());

      // The result is a 32b signed integer smuggled through a size_t register;
      // reinterpret via a union to recover the sign.
      union {
        size_t r;
        int32_t i;
      } conv;
      conv.r = result;
      int32_t e = expected[x][y];
      // Only the sign (or zero-ness) of the stub result must agree with the reference.
      EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
      std::endl;
#endif
}
1313
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001314
Mathieu Chartierc7853442015-03-27 14:35:38 -07001315static void GetSetBooleanStatic(ArtField* f, Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001316 ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001317 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001318#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1319 (defined(__x86_64__) && !defined(__APPLE__))
Fred Shih37f05ef2014-07-16 18:38:08 -07001320 constexpr size_t num_values = 5;
1321 uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };
1322
1323 for (size_t i = 0; i < num_values; ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001324 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001325 static_cast<size_t>(values[i]),
1326 0U,
1327 StubTest::GetEntrypoint(self, kQuickSet8Static),
1328 self,
1329 referrer);
1330
Mathieu Chartierc7853442015-03-27 14:35:38 -07001331 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001332 0U, 0U,
1333 StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
1334 self,
1335 referrer);
1336 // Boolean currently stores bools as uint8_t, be more zealous about asserting correct writes/gets.
1337 EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
1338 }
1339#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001340 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001341 LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
1342 // Force-print to std::cout so it's also outside the logcat.
1343 std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1344#endif
1345}
Mathieu Chartiere401d142015-04-22 13:56:20 -07001346static void GetSetByteStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Mathieu Chartierc7853442015-03-27 14:35:38 -07001347 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001348 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001349#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1350 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001351 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001352
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001353 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001354 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001355 static_cast<size_t>(values[i]),
1356 0U,
1357 StubTest::GetEntrypoint(self, kQuickSet8Static),
1358 self,
1359 referrer);
1360
Mathieu Chartierc7853442015-03-27 14:35:38 -07001361 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001362 0U, 0U,
1363 StubTest::GetEntrypoint(self, kQuickGetByteStatic),
1364 self,
1365 referrer);
1366 EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
1367 }
1368#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001369 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001370 LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
1371 // Force-print to std::cout so it's also outside the logcat.
1372 std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1373#endif
1374}
1375
1376
// Exercises the quick-entrypoint stubs for a boolean *instance* field on the
// object held by |obj|: each value is written via the Set8Instance stub, then
// verified with a direct ArtField read; the value is then stored directly and
// read back via the GetBooleanInstance stub. Values include non-0/1 bit
// patterns since booleans are stored as uint8_t.
// NOTE(review): unlike GetSetByteInstance/CharInstance/ShortInstance, this
// test writes back the *same* value (res, not ++res) before the stub read —
// presumably deliberate to avoid wrapping boolean bit patterns, but confirm.
static void GetSetBooleanInstance(Handle<mirror::Object>* obj, ArtField* f, Thread* self,
                                  ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  uint8_t values[] = { 0, true, 2, 128, 0xFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Write via the stub: args are (field index, receiver object, new value).
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet8Instance),
                              self,
                              referrer);

    // Verify the stub's write with a direct field read.
    uint8_t res = f->GetBoolean(obj->Get());
    EXPECT_EQ(values[i], res) << "Iteration " << i;

    // Direct write, then verify the getter stub observes it.
    f->SetBoolean<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint8_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
Mathieu Chartierc7853442015-03-27 14:35:38 -07001412static void GetSetByteInstance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001413 Thread* self, ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001414 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001415#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1416 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001417 int8_t values[] = { -128, -64, 0, 64, 127 };
Fred Shih37f05ef2014-07-16 18:38:08 -07001418
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001419 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001420 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001421 reinterpret_cast<size_t>(obj->Get()),
1422 static_cast<size_t>(values[i]),
1423 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1424 self,
1425 referrer);
1426
Mathieu Chartierc7853442015-03-27 14:35:38 -07001427 int8_t res = f->GetByte(obj->Get());
Fred Shih37f05ef2014-07-16 18:38:08 -07001428 EXPECT_EQ(res, values[i]) << "Iteration " << i;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001429 f->SetByte<false>(obj->Get(), ++res);
Fred Shih37f05ef2014-07-16 18:38:08 -07001430
Mathieu Chartierc7853442015-03-27 14:35:38 -07001431 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001432 reinterpret_cast<size_t>(obj->Get()),
1433 0U,
1434 StubTest::GetEntrypoint(self, kQuickGetByteInstance),
1435 self,
1436 referrer);
1437 EXPECT_EQ(res, static_cast<int8_t>(res2));
1438 }
1439#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001440 UNUSED(obj, f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001441 LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
1442 // Force-print to std::cout so it's also outside the logcat.
1443 std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1444#endif
1445}
1446
Mathieu Chartiere401d142015-04-22 13:56:20 -07001447static void GetSetCharStatic(ArtField* f, Thread* self, ArtMethod* referrer,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001448 StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001449 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001450#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1451 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001452 uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
Fred Shih37f05ef2014-07-16 18:38:08 -07001453
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001454 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001455 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001456 static_cast<size_t>(values[i]),
1457 0U,
1458 StubTest::GetEntrypoint(self, kQuickSet16Static),
1459 self,
1460 referrer);
1461
Mathieu Chartierc7853442015-03-27 14:35:38 -07001462 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Fred Shih37f05ef2014-07-16 18:38:08 -07001463 0U, 0U,
1464 StubTest::GetEntrypoint(self, kQuickGetCharStatic),
1465 self,
1466 referrer);
1467
1468 EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
1469 }
1470#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001471 UNUSED(f, self, referrer, test);
Fred Shih37f05ef2014-07-16 18:38:08 -07001472 LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
1473 // Force-print to std::cout so it's also outside the logcat.
1474 std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1475#endif
1476}
// Round-trips short (signed 16-bit) values through the Set16Static and
// GetShortStatic quick-entrypoint stubs, covering both range extremes, zero,
// and sign-boundary values. Skipped with a log message on unsupported
// architectures.
static void GetSetShortStatic(ArtField* f, Thread* self,
                              ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the setter stub: (field index, value, unused).
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet16Static),
                              self,
                              referrer);

    // Load back through the getter stub.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGetShortStatic),
                                           self,
                                           referrer);

    // Truncate to int16_t so sign extension done by the stub doesn't matter.
    EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1507
// Exercises the char instance-field stubs: writes each value via the
// Set16Instance stub and verifies with a direct ArtField read, then stores an
// incremented value directly and verifies the GetCharInstance stub reads it.
// Note ++res on the 0xFFFF entry wraps to 0, which is still a valid char.
static void GetSetCharInstance(Handle<mirror::Object>* obj, ArtField* f,
                               Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Stub write: (field index, receiver object, new value).
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    // Direct read confirms the stub's write.
    uint16_t res = f->GetChar(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    // Direct write of a distinct value; the getter stub must observe it.
    f->SetChar<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetCharInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<uint16_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
// Exercises the short instance-field stubs: writes each value via the
// Set16Instance stub and verifies with a direct ArtField read, then stores an
// incremented value directly and verifies the GetShortInstance stub reads it.
static void GetSetShortInstance(Handle<mirror::Object>* obj, ArtField* f,
                                Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Stub write: (field index, receiver object, new value).
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet16Instance),
                              self,
                              referrer);

    // Direct read confirms the stub's write.
    int16_t res = f->GetShort(obj->Get());
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
    // Direct write of a distinct value; the getter stub must observe it.
    f->SetShort<false>(obj->Get(), ++res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGetShortInstance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int16_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1576
// Round-trips 32-bit values through the Set32Static and Get32Static
// quick-entrypoint stubs. The mips64 branch truncates the returned size_t to
// uint32_t before comparing — presumably because the stub's 64-bit return is
// sign-extended there; confirm against the mips64 stub implementation.
static void GetSet32Static(ArtField* f, Thread* self, ArtMethod* referrer,
                           StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Store through the setter stub: (field index, value, unused).
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              static_cast<size_t>(values[i]),
                              0U,
                              StubTest::GetEntrypoint(self, kQuickSet32Static),
                              self,
                              referrer);

    // Load back through the getter stub.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet32Static),
                                           self,
                                           referrer);

#if defined(__mips__) && defined(__LP64__)
    EXPECT_EQ(static_cast<uint32_t>(res), values[i]) << "Iteration " << i;
#else
    EXPECT_EQ(res, values[i]) << "Iteration " << i;
#endif
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1611
1612
Mathieu Chartierc7853442015-03-27 14:35:38 -07001613static void GetSet32Instance(Handle<mirror::Object>* obj, ArtField* f,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001614 Thread* self, ArtMethod* referrer, StubTest* test)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001615 REQUIRES_SHARED(Locks::mutator_lock_) {
Goran Jakovljevic4ef69be2015-04-22 14:10:53 +02001616#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
1617 (defined(__x86_64__) && !defined(__APPLE__))
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001618 uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001619
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001620 for (size_t i = 0; i < arraysize(values); ++i) {
Mathieu Chartierc7853442015-03-27 14:35:38 -07001621 test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001622 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001623 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001624 StubTest::GetEntrypoint(self, kQuickSet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001625 self,
1626 referrer);
1627
Mathieu Chartierc7853442015-03-27 14:35:38 -07001628 int32_t res = f->GetInt(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001629 EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;
1630
1631 res++;
Mathieu Chartierc7853442015-03-27 14:35:38 -07001632 f->SetInt<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001633
Mathieu Chartierc7853442015-03-27 14:35:38 -07001634 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001635 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001636 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001637 StubTest::GetEntrypoint(self, kQuickGet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001638 self,
1639 referrer);
1640 EXPECT_EQ(res, static_cast<int32_t>(res2));
1641 }
1642#else
Andreas Gampe7c3952f2015-02-19 18:21:24 -08001643 UNUSED(obj, f, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001644 LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
1645 // Force-print to std::cout so it's also outside the logcat.
1646 std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1647#endif
1648}
1649
1650
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))

// Helper for GetSetObjStatic: stores |val| into the static object field with
// dex index |f_idx| via the SetObjStatic stub, reads it back via the
// GetObjStatic stub, and expects the same reference. Only compiled on
// architectures with stub support (guard mirrors the caller's).
static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
                                 ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Stub write: (field index, object reference, unused).
  test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                            reinterpret_cast<size_t>(val),
                            0U,
                            StubTest::GetEntrypoint(self, kQuickSetObjStatic),
                            self,
                            referrer);

  // Stub read of the same field.
  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
                                         0U, 0U,
                                         StubTest::GetEntrypoint(self, kQuickGetObjStatic),
                                         self,
                                         referrer);

  // Compare as raw pointer bits: the stub returns the reference in a register.
  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
}
#endif
1673
// Exercises the object static-field stubs with a null -> object -> null
// sequence, so both storing and clearing a reference are covered.
static void GetSetObjStatic(ArtField* f, Thread* self, ArtMethod* referrer,
                            StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_static(f->GetDexFieldIndex(), str, self, referrer, test);

  set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1693
1694
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
// Helper for GetSetObjInstance: stores |val| into field |f| of object |trg|
// via the SetObjInstance stub, reads it back via the GetObjInstance stub, and
// additionally cross-checks with a direct ArtField read. Only compiled on
// architectures with stub support (guard mirrors the caller's).
static void set_and_check_instance(ArtField* f, mirror::Object* trg,
                                   mirror::Object* val, Thread* self, ArtMethod* referrer,
                                   StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Stub write: (field index, receiver object, new reference).
  test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                            reinterpret_cast<size_t>(trg),
                            reinterpret_cast<size_t>(val),
                            StubTest::GetEntrypoint(self, kQuickSetObjInstance),
                            self,
                            referrer);

  // Stub read of the same field on the same receiver.
  size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                         reinterpret_cast<size_t>(trg),
                                         0U,
                                         StubTest::GetEntrypoint(self, kQuickGetObjInstance),
                                         self,
                                         referrer);

  // Compare as raw pointer bits...
  EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;

  // ...and via a direct field read as an independent check.
  EXPECT_OBJ_PTR_EQ(val, f->GetObj(trg));
}
#endif
1720
// Exercises the object instance-field stubs with a null -> object -> null
// sequence on the object held by |obj|, covering both storing and clearing a
// reference (the latter exercising any null-handling in the write barrier
// path of the stub).
static void GetSetObjInstance(Handle<mirror::Object>* obj, ArtField* f,
                              Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);

  // Allocate a string object for simplicity.
  mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
  set_and_check_instance(f, obj->Get(), str, self, referrer, test);

  set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1740
1741
Calin Juravle872ab3f2015-10-02 07:27:51 +01001742// TODO: Complete these tests for 32b architectures
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001743
// Round-trips 64-bit values through the Set64Static and Get64Static
// quick-entrypoint stubs. Only compiled on 64-bit targets (x86-64 non-Apple,
// mips64, arm64), so passing uint64_t values as size_t is lossless here.
static void GetSet64Static(ArtField* f, Thread* self, ArtMethod* referrer,
                           StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) \
    || defined(__aarch64__)
  uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // 64 bit FieldSet stores the set value in the second register.
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              0U,
                              values[i],
                              StubTest::GetEntrypoint(self, kQuickSet64Static),
                              self,
                              referrer);

    // Load back through the getter stub.
    size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                           0U, 0U,
                                           StubTest::GetEntrypoint(self, kQuickGet64Static),
                                           self,
                                           referrer);

    EXPECT_EQ(res, values[i]) << "Iteration " << i;
  }
#else
  UNUSED(f, self, referrer, test);
  LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1775
1776
// Exercises the 64-bit instance-field stubs: writes each value via the
// Set64Instance stub and verifies with a direct ArtField read, then stores an
// incremented value directly and verifies the Get64Instance stub reads it.
// Only compiled on 64-bit targets, so the size_t casts are lossless.
static void GetSet64Instance(Handle<mirror::Object>* obj, ArtField* f,
                             Thread* self, ArtMethod* referrer, StubTest* test)
    REQUIRES_SHARED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) || \
    defined(__aarch64__)
  uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };

  for (size_t i = 0; i < arraysize(values); ++i) {
    // Stub write: (field index, receiver object, new value).
    test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                              reinterpret_cast<size_t>(obj->Get()),
                              static_cast<size_t>(values[i]),
                              StubTest::GetEntrypoint(self, kQuickSet64Instance),
                              self,
                              referrer);

    // Direct read confirms the stub's write.
    int64_t res = f->GetLong(obj->Get());
    EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;

    // Direct write of a distinct value; the getter stub must observe it.
    res++;
    f->SetLong<false>(obj->Get(), res);

    size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
                                            reinterpret_cast<size_t>(obj->Get()),
                                            0U,
                                            StubTest::GetEntrypoint(self, kQuickGet64Instance),
                                            self,
                                            referrer);
    EXPECT_EQ(res, static_cast<int64_t>(res2));
  }
#else
  UNUSED(obj, f, self, referrer, test);
  LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1813
1814static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
1815 // garbage is created during ClassLinker::Init
1816
1817 JNIEnv* env = Thread::Current()->GetJniEnv();
1818 jclass jc = env->FindClass("AllFields");
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001819 CHECK(jc != nullptr);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001820 jobject o = env->AllocObject(jc);
Mathieu Chartier2cebb242015-04-21 16:50:40 -07001821 CHECK(o != nullptr);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001822
1823 ScopedObjectAccess soa(self);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001824 StackHandleScope<3> hs(self);
Mathieu Chartier0795f232016-09-27 18:43:30 -07001825 Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object>(o)));
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001826 Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001827 // Need a method as a referrer
Andreas Gampe542451c2016-07-26 09:02:02 -07001828 ArtMethod* m = c->GetDirectMethod(0, kRuntimePointerSize);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001829
1830 // Play with it...
1831
1832 // Static fields.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001833 for (ArtField& f : c->GetSFields()) {
1834 Primitive::Type type = f.GetTypeAsPrimitiveType();
Mathieu Chartierc7853442015-03-27 14:35:38 -07001835 if (test_type != type) {
1836 continue;
1837 }
1838 switch (type) {
1839 case Primitive::Type::kPrimBoolean:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001840 GetSetBooleanStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001841 break;
1842 case Primitive::Type::kPrimByte:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001843 GetSetByteStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001844 break;
1845 case Primitive::Type::kPrimChar:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001846 GetSetCharStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001847 break;
1848 case Primitive::Type::kPrimShort:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001849 GetSetShortStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001850 break;
1851 case Primitive::Type::kPrimInt:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001852 GetSet32Static(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001853 break;
1854 case Primitive::Type::kPrimLong:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001855 GetSet64Static(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001856 break;
1857 case Primitive::Type::kPrimNot:
1858 // Don't try array.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001859 if (f.GetTypeDescriptor()[0] != '[') {
1860 GetSetObjStatic(&f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001861 }
1862 break;
1863 default:
1864 break; // Skip.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001865 }
1866 }
1867
1868 // Instance fields.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001869 for (ArtField& f : c->GetIFields()) {
1870 Primitive::Type type = f.GetTypeAsPrimitiveType();
Mathieu Chartierc7853442015-03-27 14:35:38 -07001871 if (test_type != type) {
1872 continue;
1873 }
1874 switch (type) {
1875 case Primitive::Type::kPrimBoolean:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001876 GetSetBooleanInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001877 break;
1878 case Primitive::Type::kPrimByte:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001879 GetSetByteInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001880 break;
1881 case Primitive::Type::kPrimChar:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001882 GetSetCharInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001883 break;
1884 case Primitive::Type::kPrimShort:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001885 GetSetShortInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001886 break;
1887 case Primitive::Type::kPrimInt:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001888 GetSet32Instance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001889 break;
1890 case Primitive::Type::kPrimLong:
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001891 GetSet64Instance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001892 break;
1893 case Primitive::Type::kPrimNot:
1894 // Don't try array.
Mathieu Chartier54d220e2015-07-30 16:20:06 -07001895 if (f.GetTypeDescriptor()[0] != '[') {
1896 GetSetObjInstance(&obj, &f, self, m, test);
Mathieu Chartierc7853442015-03-27 14:35:38 -07001897 }
1898 break;
1899 default:
1900 break; // Skip.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001901 }
1902 }
1903
1904 // TODO: Deallocate things.
1905}
1906
Fred Shih37f05ef2014-07-16 18:38:08 -07001907TEST_F(StubTest, Fields8) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001908 Thread* self = Thread::Current();
1909
1910 self->TransitionFromSuspendedToRunnable();
1911 LoadDex("AllFields");
1912 bool started = runtime_->Start();
1913 CHECK(started);
1914
1915 TestFields(self, this, Primitive::Type::kPrimBoolean);
1916 TestFields(self, this, Primitive::Type::kPrimByte);
1917}
1918
1919TEST_F(StubTest, Fields16) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001920 Thread* self = Thread::Current();
1921
1922 self->TransitionFromSuspendedToRunnable();
1923 LoadDex("AllFields");
1924 bool started = runtime_->Start();
1925 CHECK(started);
1926
1927 TestFields(self, this, Primitive::Type::kPrimChar);
1928 TestFields(self, this, Primitive::Type::kPrimShort);
1929}
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001930
1931TEST_F(StubTest, Fields32) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001932 Thread* self = Thread::Current();
1933
1934 self->TransitionFromSuspendedToRunnable();
1935 LoadDex("AllFields");
1936 bool started = runtime_->Start();
1937 CHECK(started);
1938
1939 TestFields(self, this, Primitive::Type::kPrimInt);
1940}
1941
1942TEST_F(StubTest, FieldsObj) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001943 Thread* self = Thread::Current();
1944
1945 self->TransitionFromSuspendedToRunnable();
1946 LoadDex("AllFields");
1947 bool started = runtime_->Start();
1948 CHECK(started);
1949
1950 TestFields(self, this, Primitive::Type::kPrimNot);
1951}
1952
1953TEST_F(StubTest, Fields64) {
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001954 Thread* self = Thread::Current();
1955
1956 self->TransitionFromSuspendedToRunnable();
1957 LoadDex("AllFields");
1958 bool started = runtime_->Start();
1959 CHECK(started);
1960
1961 TestFields(self, this, Primitive::Type::kPrimLong);
1962}
1963
// Disabled, b/27991555 .
// FIXME: Hacking the entry point to point to art_quick_to_interpreter_bridge is broken.
// The bridge calls through to GetCalleeSaveMethodCaller() which looks up the pre-header
// and gets a bogus OatQuickMethodHeader* pointing into our assembly code just before
// the bridge and uses that to check for inlined frames, crashing in the process.
//
// Exercises interface dispatch through the quick stubs in two ways:
//   1. the IMT conflict trampoline, via a hand-built ImtConflictTable, and
//   2. the regular interface trampoline with access check,
// using List.contains()/ArrayList.contains() as the test interface method.
TEST_F(StubTest, DISABLED_IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
    (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod = env->GetMethodID(
      arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get representation.
  ArtMethod* contains_amethod = soa.DecodeMethod(contains_jmethod);

  // Patch up ArrayList.contains.
  // If the method has no compiled code yet, point its entrypoint at the
  // to-interpreter bridge so the trampolines below have something to call.
  // NOTE(review): this hack is exactly what the FIXME above says is broken.
  if (contains_amethod->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
  }

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod = env->GetMethodID(
      list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get mirror representation.
  ArtMethod* inf_contains = soa.DecodeMethod(inf_contains_jmethod);

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Create instances.

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object>(jobj)));

  // Invocation tests.

  // 1. imt_conflict

  // Contains.

  // We construct the ImtConflictTable ourselves, as we cannot go into the runtime stub
  // that will create it: the runtime stub expects to be called by compiled code.
  // The table maps the interface method (List.contains) to the concrete
  // implementation (ArrayList.contains).
  LinearAlloc* linear_alloc = Runtime::Current()->GetLinearAlloc();
  ArtMethod* conflict_method = Runtime::Current()->CreateImtConflictMethod(linear_alloc);
  ImtConflictTable* empty_conflict_table =
      Runtime::Current()->GetClassLinker()->CreateImtConflictTable(/*count*/0u, linear_alloc);
  void* data = linear_alloc->Alloc(
      self,
      ImtConflictTable::ComputeSizeWithOneMoreEntry(empty_conflict_table, kRuntimePointerSize));
  ImtConflictTable* new_table = new (data) ImtConflictTable(
      empty_conflict_table, inf_contains, contains_amethod, kRuntimePointerSize);
  conflict_method->SetImtConflictTable(new_table, kRuntimePointerSize);

  // Call contains(obj) through the conflict trampoline; the list is still
  // empty, so this must return JNI_FALSE. The interface method's dex index is
  // passed as the hidden argument.
  size_t result =
      Invoke3WithReferrerAndHidden(reinterpret_cast<size_t>(conflict_method),
                                   reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self,
                                   contains_amethod,
                                   static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << mirror::Object::PrettyTypeOf(self->GetException());

  // Contains.

  // Same dispatch again; the object is now in the list, so expect JNI_TRUE.
  result =
      Invoke3WithReferrerAndHidden(reinterpret_cast<size_t>(conflict_method),
                                   reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self,
                                   contains_amethod,
                                   static_cast<size_t>(inf_contains->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // 2. regular interface trampoline

  // contains(obj) via the interface trampoline: obj was added above -> true.
  result = Invoke3WithReferrer(static_cast<size_t>(inf_contains->GetDexMethodIndex()),
                               reinterpret_cast<size_t>(array_list.Get()),
                               reinterpret_cast<size_t>(obj.Get()),
                               StubTest::GetEntrypoint(self,
                                   kQuickInvokeInterfaceTrampolineWithAccessCheck),
                               self, contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);

  // contains(array_list) — the list does not contain itself -> false.
  result = Invoke3WithReferrer(
      static_cast<size_t>(inf_contains->GetDexMethodIndex()),
      reinterpret_cast<size_t>(array_list.Get()), reinterpret_cast<size_t>(array_list.Get()),
      StubTest::GetEntrypoint(self, kQuickInvokeInterfaceTrampolineWithAccessCheck), self,
      contains_amethod);

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
2107
Andreas Gampe6aac3552014-06-09 14:55:53 -07002108TEST_F(StubTest, StringIndexOf) {
Chris Larsencf283da2016-01-19 16:45:35 -08002109#if defined(__arm__) || defined(__aarch64__) || defined(__mips__)
Andreas Gampe6aac3552014-06-09 14:55:53 -07002110 Thread* self = Thread::Current();
2111 ScopedObjectAccess soa(self);
2112 // garbage is created during ClassLinker::Init
2113
2114 // Create some strings
2115 // Use array so we can index into it and use a matrix for expected results
2116 // Setup: The first half is standard. The second half uses a non-zero offset.
2117 // TODO: Shared backing arrays.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002118 const char* c_str[] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
2119 static constexpr size_t kStringCount = arraysize(c_str);
2120 const char c_char[] = { 'a', 'b', 'c', 'd', 'e' };
2121 static constexpr size_t kCharCount = arraysize(c_char);
Andreas Gampe6aac3552014-06-09 14:55:53 -07002122
2123 StackHandleScope<kStringCount> hs(self);
2124 Handle<mirror::String> s[kStringCount];
2125
2126 for (size_t i = 0; i < kStringCount; ++i) {
2127 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
2128 }
2129
2130 // Matrix of expectations. First component is first parameter. Note we only check against the
2131 // sign, not the value. As we are testing random offsets, we need to compute this and need to
2132 // rely on String::CompareTo being correct.
2133 static constexpr size_t kMaxLen = 9;
2134 DCHECK_LE(strlen(c_str[kStringCount-1]), kMaxLen) << "Please fix the indexof test.";
2135
2136 // Last dimension: start, offset by 1.
2137 int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
2138 for (size_t x = 0; x < kStringCount; ++x) {
2139 for (size_t y = 0; y < kCharCount; ++y) {
2140 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2141 expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
2142 }
2143 }
2144 }
2145
2146 // Play with it...
2147
2148 for (size_t x = 0; x < kStringCount; ++x) {
2149 for (size_t y = 0; y < kCharCount; ++y) {
2150 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2151 int32_t start = static_cast<int32_t>(z) - 1;
2152
2153 // Test string_compareto x y
2154 size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
Andreas Gampe29b38412014-08-13 00:15:43 -07002155 StubTest::GetEntrypoint(self, kQuickIndexOf), self);
Andreas Gampe6aac3552014-06-09 14:55:53 -07002156
2157 EXPECT_FALSE(self->IsExceptionPending());
2158
2159 // The result is a 32b signed integer
2160 union {
2161 size_t r;
2162 int32_t i;
2163 } conv;
2164 conv.r = result;
2165
2166 EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
2167 c_char[y] << " @ " << start;
2168 }
2169 }
2170 }
2171
2172 // TODO: Deallocate things.
2173
2174 // Tests done.
2175#else
2176 LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
2177 // Force-print to std::cout so it's also outside the logcat.
2178 std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe51f76352014-05-21 08:28:48 -07002179#endif
2180}
2181
Roland Levillain02b75802016-07-13 11:54:35 +01002182// TODO: Exercise the ReadBarrierMarkRegX entry points.
2183
Man Cao1aee9002015-07-14 22:31:42 -07002184TEST_F(StubTest, ReadBarrier) {
2185#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
2186 defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
2187 Thread* self = Thread::Current();
2188
2189 const uintptr_t readBarrierSlow = StubTest::GetEntrypoint(self, kQuickReadBarrierSlow);
2190
2191 // Create an object
2192 ScopedObjectAccess soa(self);
2193 // garbage is created during ClassLinker::Init
2194
2195 StackHandleScope<2> hs(soa.Self());
2196 Handle<mirror::Class> c(
2197 hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
2198
2199 // Build an object instance
2200 Handle<mirror::Object> obj(hs.NewHandle(c->AllocObject(soa.Self())));
2201
2202 EXPECT_FALSE(self->IsExceptionPending());
2203
2204 size_t result = Invoke3(0U, reinterpret_cast<size_t>(obj.Get()),
2205 mirror::Object::ClassOffset().SizeValue(), readBarrierSlow, self);
2206
2207 EXPECT_FALSE(self->IsExceptionPending());
2208 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
2209 mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
2210 EXPECT_EQ(klass, obj->GetClass());
2211
2212 // Tests done.
2213#else
2214 LOG(INFO) << "Skipping read_barrier_slow";
2215 // Force-print to std::cout so it's also outside the logcat.
2216 std::cout << "Skipping read_barrier_slow" << std::endl;
2217#endif
2218}
2219
Roland Levillain0d5a2812015-11-13 10:07:31 +00002220TEST_F(StubTest, ReadBarrierForRoot) {
2221#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
2222 defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
2223 Thread* self = Thread::Current();
2224
2225 const uintptr_t readBarrierForRootSlow =
2226 StubTest::GetEntrypoint(self, kQuickReadBarrierForRootSlow);
2227
2228 // Create an object
2229 ScopedObjectAccess soa(self);
2230 // garbage is created during ClassLinker::Init
2231
2232 StackHandleScope<1> hs(soa.Self());
2233
2234 Handle<mirror::String> obj(
2235 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
2236
2237 EXPECT_FALSE(self->IsExceptionPending());
2238
2239 GcRoot<mirror::Class>& root = mirror::String::java_lang_String_;
2240 size_t result = Invoke3(reinterpret_cast<size_t>(&root), 0U, 0U, readBarrierForRootSlow, self);
2241
2242 EXPECT_FALSE(self->IsExceptionPending());
2243 EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
2244 mirror::Class* klass = reinterpret_cast<mirror::Class*>(result);
2245 EXPECT_EQ(klass, obj->GetClass());
2246
2247 // Tests done.
2248#else
2249 LOG(INFO) << "Skipping read_barrier_for_root_slow";
2250 // Force-print to std::cout so it's also outside the logcat.
2251 std::cout << "Skipping read_barrier_for_root_slow" << std::endl;
2252#endif
2253}
2254
Andreas Gampe525cde22014-04-22 15:44:50 -07002255} // namespace art